Make exports consistent
parent 3ec93f42c6
commit 725f01d8da
@@ -1,7 +1,7 @@
 import Dexie from "dexie";
 import { templateSeeds } from "./seeds";
 
-const db = new Dexie("drawDB");
+export const db = new Dexie("drawDB");
 
 db.version(4).stores({
   diagrams: "++id, lastModified",
@@ -11,5 +11,3 @@ db.version(4).stores({
 db.on("populate", (transaction) => {
   transaction.templates.bulkAdd(templateSeeds).catch((e) => console.log(e));
 });
-
-export { db };
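Side note: switching from `const db = ...; export { db };` to an inline `export const db = ...` does not change the module's public surface; the named export is identical either way, so existing consumers keep working. A minimal sketch, with the import path assumed rather than taken from the diff:

// Named import works identically with either export style.
import { db } from "../data/db"; // assumed path

// "diagrams" and "lastModified" come from the db.version(4).stores(...) call above.
const recent = await db.table("diagrams").orderBy("lastModified").toArray();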
|
@@ -1,4 +1,4 @@
-const tableSchema = {
+export const tableSchema = {
   type: "object",
   properties: {
     id: { type: "integer" },
@@ -56,7 +56,7 @@ const tableSchema = {
   required: ["id", "name", "x", "y", "fields", "comment", "indices", "color"],
 };
 
-const areaSchema = {
+export const areaSchema = {
   type: "object",
   properties: {
     id: { type: "integer" },
@@ -70,7 +70,7 @@ const areaSchema = {
   required: ["id", "name", "x", "y", "width", "height", "color"],
 };
 
-const noteSchema = {
+export const noteSchema = {
   type: "object",
   properties: {
     id: { type: "integer" },
@@ -84,7 +84,7 @@ const noteSchema = {
   required: ["id", "x", "y", "title", "content", "color", "height"],
 };
 
-const typeSchema = {
+export const typeSchema = {
   type: "object",
   properties: {
     name: { type: "string" },
@@ -108,7 +108,7 @@ const typeSchema = {
   required: ["name", "fields", "comment"],
 };
 
-const jsonSchema = {
+export const jsonSchema = {
   type: "object",
   properties: {
     tables: {
@@ -166,7 +166,7 @@ const jsonSchema = {
   required: ["tables", "relationships", "notes", "subjectAreas"],
 };
 
-const ddbSchema = {
+export const ddbSchema = {
   type: "object",
   properties: {
     author: { type: "string" },
@@ -176,12 +176,3 @@ const ddbSchema = {
     ...jsonSchema.properties,
   },
 };
-
-export {
-  jsonSchema,
-  ddbSchema,
-  tableSchema,
-  noteSchema,
-  areaSchema,
-  typeSchema,
-};
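The schema module follows the same pattern: each constant is exported where it is declared and the trailing aggregate `export { ... }` block is dropped. Inline exports do not affect references between the constants, such as `...jsonSchema.properties` inside `ddbSchema`, and named imports elsewhere (e.g. `import { ddbSchema, jsonSchema } from "../data/schemas"`, which appears verbatim later in this commit) resolve exactly as before. A reduced sketch of the pattern with made-up schema contents:

// Inline-exported constants can still reference one another inside the module.
export const baseSchemaSketch = { properties: { tables: { type: "array" } } };
export const extendedSchemaSketch = {
  properties: { author: { type: "string" }, ...baseSchemaSketch.properties },
};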
|
@@ -1,4 +1,4 @@
-const calcPath = (x1, x2, y1, y2, zoom = 1) => {
+export function calcPath(x1, x2, y1, y2, zoom = 1) {
   const tableWidth = 200 * zoom;
   if (y1 <= y2) {
     if (x1 + tableWidth <= x2) {
@@ -107,6 +107,4 @@ const calcPath = (x1, x2, y1, y2, zoom = 1) => {
       } ${y2} L ${endX} ${y2}`;
     }
   }
-};
-
-export { calcPath };
+}
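Here the change is slightly larger: the arrow function assigned to a `const` becomes an exported function declaration. For callers this is equivalent, with one general JavaScript nuance: function declarations are hoisted, while a `const` binding cannot be used before its initializer runs. A generic illustration, not code from the repository:

console.log(square(3)); // 9: the declaration is hoisted and callable here
function square(n) {
  return n * n;
}

// console.log(cube(3)); // would throw ReferenceError (temporal dead zone)
const cube = (n) => n * n;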
|
@@ -1,4 +1,4 @@
-function enterFullscreen() {
+export function enterFullscreen() {
   const element = document.documentElement;
   if (element.requestFullscreen) {
     element.requestFullscreen();
@@ -11,7 +11,7 @@ function enterFullscreen() {
   }
 }
 
-function exitFullscreen() {
+export function exitFullscreen() {
   if (document.exitFullscreen) {
     document.exitFullscreen();
   } else if (document.mozCancelFullScreen) {
@@ -22,5 +22,3 @@ function exitFullscreen() {
     document.msExitFullscreen();
   }
 }
-
-export { enterFullscreen, exitFullscreen };
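For context, a typical call site would toggle between the two helpers using the standard document.fullscreenElement flag; a hedged usage sketch with an assumed import path:

import { enterFullscreen, exitFullscreen } from "./fullscreen"; // assumed path

function toggleFullscreen() {
  // fullscreenElement is non-null while the page is in fullscreen mode.
  if (document.fullscreenElement) {
    exitFullscreen();
  } else {
    enterFullscreen();
  }
}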
|
@@ -63,7 +63,7 @@ function checkDefault(field) {
   }
 }
 
-function getIssues(diagram) {
+export function getIssues(diagram) {
   const issues = [];
   const duplicateTableNames = {};
 
@@ -240,5 +240,3 @@ function getIssues(diagram) {
 
   return issues;
 }
-
-export { validateDateStr, getIssues };
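getIssues keeps its signature and its issues-array return value; only the export style changes. A hedged usage sketch, with the import path and the handling of each entry assumed:

import { getIssues } from "../utils/issues"; // assumed path

const issues = getIssues(diagram);
if (issues.length > 0) {
  // Surface each reported issue; how they are displayed is up to the caller.
  issues.forEach((issue) => console.warn(issue));
}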
|
@@ -1,6 +1,6 @@
 import { sqlDataTypes } from "../data/constants";
 
-function getJsonType(f) {
+export function getJsonType(f) {
   if (!sqlDataTypes.includes(f.type)) {
     return '{ "type" : "object", additionalProperties : true }';
   }
@@ -30,7 +30,7 @@ function getJsonType(f) {
   }
 }
 
-function generateSchema(type) {
+export function generateSchema(type) {
   return `{\n\t\t\t"$schema": "http://json-schema.org/draft-04/schema#",\n\t\t\t"type": "object",\n\t\t\t"properties": {\n\t\t\t\t${type.fields
     .map((f) => `"${f.name}" : ${getJsonType(f)}`)
     .join(
@@ -38,7 +38,7 @@ function generateSchema(type) {
     )}\n\t\t\t},\n\t\t\t"additionalProperties": false\n\t\t}`;
 }
 
-function getTypeString(field, dbms = "mysql", baseType = false) {
+export function getTypeString(field, dbms = "mysql", baseType = false) {
   if (dbms === "mysql") {
     if (field.type === "UUID") {
       return `VARCHAR(36)`;
@@ -131,7 +131,7 @@ function getTypeString(field, dbms = "mysql", baseType = false) {
   }
 }
 
-function hasQuotes(type) {
+export function hasQuotes(type) {
   return [
     "CHAR",
     "VARCHAR",
@@ -145,7 +145,7 @@ function hasQuotes(type) {
   ].includes(type);
 }
 
-function jsonToMySQL(obj) {
+export function jsonToMySQL(obj) {
   return `${obj.tables
     .map(
       (table) =>
@@ -213,7 +213,7 @@ function jsonToMySQL(obj) {
     .join("\n")}`;
 }
 
-function jsonToPostgreSQL(obj) {
+export function jsonToPostgreSQL(obj) {
   return `${obj.types.map((type) => {
     const typeStatements = type.fields
       .filter((f) => f.type === "ENUM" || f.type === "SET")
@@ -308,7 +308,7 @@ function jsonToPostgreSQL(obj) {
     .join("\n")}`;
 }
 
-function getSQLiteType(field) {
+export function getSQLiteType(field) {
   switch (field.type) {
     case "INT":
     case "SMALLINT":
@@ -341,7 +341,7 @@ function getSQLiteType(field) {
   }
 }
 
-function getInlineFK(table, obj) {
+export function getInlineFK(table, obj) {
   let fk = "";
   obj.references.forEach((r) => {
     if (fk !== "") return;
@@ -356,7 +356,7 @@ function getInlineFK(table, obj) {
   return fk;
 }
 
-function jsonToSQLite(obj) {
+export function jsonToSQLite(obj) {
   return obj.tables
     .map((table) => {
       const inlineFK = getInlineFK(table, obj);
@@ -409,7 +409,7 @@ function jsonToSQLite(obj) {
     .join("\n");
 }
 
-function jsonToMariaDB(obj) {
+export function jsonToMariaDB(obj) {
   return `${obj.tables
     .map(
       (table) =>
@@ -477,7 +477,7 @@ function jsonToMariaDB(obj) {
     .join("\n")}`;
 }
 
-function jsonToSQLServer(obj) {
+export function jsonToSQLServer(obj) {
   return `${obj.types
     .map((type) => {
       return `${
@@ -549,15 +549,15 @@ function jsonToSQLServer(obj) {
     .join("\n")}`;
 }
 
-function isSized(type) {
+export function isSized(type) {
   return ["CHAR", "VARCHAR", "BINARY", "VARBINARY", "TEXT"].includes(type);
 }
 
-function hasPrecision(type) {
+export function hasPrecision(type) {
   return ["DOUBLE", "NUMERIC", "DECIMAL", "FLOAT"].includes(type);
 }
 
-function hasCheck(type) {
+export function hasCheck(type) {
   return [
     "INT",
     "SMALLINT",
@@ -572,7 +572,7 @@ function hasCheck(type) {
   ].includes(type);
 }
 
-function getSize(type) {
+export function getSize(type) {
   switch (type) {
     case "CHAR":
     case "BINARY":
@@ -586,15 +586,3 @@ function getSize(type) {
       return "";
   }
 }
-
-export {
-  jsonToMySQL,
-  jsonToPostgreSQL,
-  isSized,
-  getSize,
-  hasPrecision,
-  hasCheck,
-  jsonToSQLite,
-  jsonToMariaDB,
-  jsonToSQLServer,
-};
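With every generator exported inline, both selective and namespace imports behave the same as they did with the old aggregate `export { ... }` list. A hedged sketch, with the module path and variable names assumed:

// Pick individual generators...
import { jsonToMySQL } from "../utils/toSQL"; // assumed path

// ...or gather all of them under one namespace object.
import * as sql from "../utils/toSQL";

const mysqlDDL = jsonToMySQL(diagram);
const postgresDDL = sql.jsonToPostgreSQL(diagram);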
|
@@ -1,4 +1,4 @@
-function dataURItoBlob(dataUrl) {
+export function dataURItoBlob(dataUrl) {
   const byteString = atob(dataUrl.split(",")[1]);
   const mimeString = dataUrl.split(",")[0].split(":")[1].split(";")[0];
   const arrayBuffer = new ArrayBuffer(byteString.length);
@@ -11,8 +11,6 @@ function dataURItoBlob(dataUrl) {
   return new Blob([intArray], { type: mimeString });
 }
 
-function arrayIsEqual(arr1, arr2) {
+export function arrayIsEqual(arr1, arr2) {
   return JSON.stringify(arr1) === JSON.stringify(arr2);
 }
-
-export { dataURItoBlob, arrayIsEqual };
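One common use for a helper like dataURItoBlob is turning a canvas data URL into a Blob, for example to build a download; a sketch using only standard DOM APIs, with the import path and canvas selection assumed:

import { dataURItoBlob } from "../utils/utils"; // assumed path

const dataUrl = document.querySelector("canvas").toDataURL("image/png");
const blob = dataURItoBlob(dataUrl); // MIME type is parsed out of the data URL
const objectUrl = URL.createObjectURL(blob); // e.g. assign to a download link's href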
|
@@ -1,12 +1,10 @@
 import { Validator } from "jsonschema";
 import { ddbSchema, jsonSchema } from "../data/schemas";
 
-function jsonDiagramIsValid(obj) {
+export function jsonDiagramIsValid(obj) {
   return new Validator().validate(obj, jsonSchema).valid;
 }
 
-function ddbDiagramIsValid(obj) {
+export function ddbDiagramIsValid(obj) {
   return new Validator().validate(obj, ddbSchema).valid;
 }
-
-export { jsonDiagramIsValid, ddbDiagramIsValid };
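Both validators remain thin wrappers around jsonschema's Validator, so callers can keep guarding imported files with them; a hedged sketch with the import path and surrounding logic assumed:

import { jsonDiagramIsValid, ddbDiagramIsValid } from "../utils/validateSchema"; // assumed path

const parsed = JSON.parse(fileContents);
if (!jsonDiagramIsValid(parsed) && !ddbDiagramIsValid(parsed)) {
  throw new Error("File does not match the diagram schema");
}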
|