Make exports consistent

1ilit 2024-03-15 20:55:43 +02:00
parent 3ec93f42c6
commit 725f01d8da
8 changed files with 31 additions and 64 deletions
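Every hunk below applies the same pattern: the deferred export { ... } list at the end of a module is removed and each binding is exported where it is declared. A minimal sketch of the before/after, using getIssues from one of the files in this commit:

// before: declare first, re-export in a list at the bottom of the module
function getIssues(diagram) { /* ... */ }
export { getIssues };

// after: export at the declaration site; importers see the same named binding
export function getIssues(diagram) { /* ... */ }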

View File

@@ -1,7 +1,7 @@
 import Dexie from "dexie";
 import { templateSeeds } from "./seeds";
-const db = new Dexie("drawDB");
+export const db = new Dexie("drawDB");
 db.version(4).stores({
 diagrams: "++id, lastModified",
@@ -11,5 +11,3 @@ db.version(4).stores({
 db.on("populate", (transaction) => {
 transaction.templates.bulkAdd(templateSeeds).catch((e) => console.log(e));
 });
-export { db };
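
db was already exported by name, so moving export onto the declaration does not change the module's public shape, and existing import sites keep working unchanged. A small usage sketch (the import path is an assumption, not shown in the diff):

import { db } from "./db"; // same named binding before and after this change

db.diagrams.toArray().then(console.log); // Dexie table declared in the stores() call above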

View File

@@ -1,4 +1,4 @@
-const tableSchema = {
+export const tableSchema = {
 type: "object",
 properties: {
 id: { type: "integer" },
@@ -56,7 +56,7 @@ const tableSchema = {
 required: ["id", "name", "x", "y", "fields", "comment", "indices", "color"],
 };
-const areaSchema = {
+export const areaSchema = {
 type: "object",
 properties: {
 id: { type: "integer" },
@@ -70,7 +70,7 @@ const areaSchema = {
 required: ["id", "name", "x", "y", "width", "height", "color"],
 };
-const noteSchema = {
+export const noteSchema = {
 type: "object",
 properties: {
 id: { type: "integer" },
@@ -84,7 +84,7 @@ const noteSchema = {
 required: ["id", "x", "y", "title", "content", "color", "height"],
 };
-const typeSchema = {
+export const typeSchema = {
 type: "object",
 properties: {
 name: { type: "string" },
@@ -108,7 +108,7 @@ const typeSchema = {
 required: ["name", "fields", "comment"],
 };
-const jsonSchema = {
+export const jsonSchema = {
 type: "object",
 properties: {
 tables: {
@@ -166,7 +166,7 @@ const jsonSchema = {
 required: ["tables", "relationships", "notes", "subjectAreas"],
 };
-const ddbSchema = {
+export const ddbSchema = {
 type: "object",
 properties: {
 author: { type: "string" },
@@ -176,12 +176,3 @@ const ddbSchema = {
 ...jsonSchema.properties,
 },
 };
-export {
-jsonSchema,
-ddbSchema,
-tableSchema,
-noteSchema,
-areaSchema,
-typeSchema,
-};

View File

@@ -1,4 +1,4 @@
-const calcPath = (x1, x2, y1, y2, zoom = 1) => {
+export function calcPath(x1, x2, y1, y2, zoom = 1) {
 const tableWidth = 200 * zoom;
 if (y1 <= y2) {
 if (x1 + tableWidth <= x2) {
@@ -107,6 +107,4 @@ const calcPath = (x1, x2, y1, y2, zoom = 1) => {
 } ${y2} L ${endX} ${y2}`;
 }
 }
-};
-export { calcPath };
+}
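
Besides moving the export, calcPath changes from a const arrow function to a function declaration. Function declarations are hoisted, so calcPath can now be referenced by code that runs earlier in the module; a small illustrative sketch, not part of the diff:

console.log(calcPath(0, 100, 0, 100)); // works: the exported function declaration is hoisted
// with the old `const calcPath = (...) => { ... }`, this line would throw a ReferenceError

export function calcPath(x1, x2, y1, y2, zoom = 1) { /* ... */ }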

View File

@@ -1,4 +1,4 @@
-function enterFullscreen() {
+export function enterFullscreen() {
 const element = document.documentElement;
 if (element.requestFullscreen) {
 element.requestFullscreen();
@@ -11,7 +11,7 @@ function enterFullscreen() {
 }
 }
-function exitFullscreen() {
+export function exitFullscreen() {
 if (document.exitFullscreen) {
 document.exitFullscreen();
 } else if (document.mozCancelFullScreen) {
@@ -22,5 +22,3 @@ function exitFullscreen() {
 document.msExitFullscreen();
 }
 }
-export { enterFullscreen, exitFullscreen };

View File

@@ -63,7 +63,7 @@ function checkDefault(field) {
 }
 }
-function getIssues(diagram) {
+export function getIssues(diagram) {
 const issues = [];
 const duplicateTableNames = {};
@@ -240,5 +240,3 @@ function getIssues(diagram) {
 return issues;
 }
-export { validateDateStr, getIssues };

View File

@@ -1,6 +1,6 @@
 import { sqlDataTypes } from "../data/constants";
-function getJsonType(f) {
+export function getJsonType(f) {
 if (!sqlDataTypes.includes(f.type)) {
 return '{ "type" : "object", additionalProperties : true }';
 }
@@ -30,7 +30,7 @@ function getJsonType(f) {
 }
 }
-function generateSchema(type) {
+export function generateSchema(type) {
 return `{\n\t\t\t"$schema": "http://json-schema.org/draft-04/schema#",\n\t\t\t"type": "object",\n\t\t\t"properties": {\n\t\t\t\t${type.fields
 .map((f) => `"${f.name}" : ${getJsonType(f)}`)
 .join(
@@ -38,7 +38,7 @@ function generateSchema(type) {
 )}\n\t\t\t},\n\t\t\t"additionalProperties": false\n\t\t}`;
 }
-function getTypeString(field, dbms = "mysql", baseType = false) {
+export function getTypeString(field, dbms = "mysql", baseType = false) {
 if (dbms === "mysql") {
 if (field.type === "UUID") {
 return `VARCHAR(36)`;
@@ -131,7 +131,7 @@ function getTypeString(field, dbms = "mysql", baseType = false) {
 }
 }
-function hasQuotes(type) {
+export function hasQuotes(type) {
 return [
 "CHAR",
 "VARCHAR",
@@ -145,7 +145,7 @@ function hasQuotes(type) {
 ].includes(type);
 }
-function jsonToMySQL(obj) {
+export function jsonToMySQL(obj) {
 return `${obj.tables
 .map(
 (table) =>
@@ -213,7 +213,7 @@ function jsonToMySQL(obj) {
 .join("\n")}`;
 }
-function jsonToPostgreSQL(obj) {
+export function jsonToPostgreSQL(obj) {
 return `${obj.types.map((type) => {
 const typeStatements = type.fields
 .filter((f) => f.type === "ENUM" || f.type === "SET")
@@ -308,7 +308,7 @@ function jsonToPostgreSQL(obj) {
 .join("\n")}`;
 }
-function getSQLiteType(field) {
+export function getSQLiteType(field) {
 switch (field.type) {
 case "INT":
 case "SMALLINT":
@@ -341,7 +341,7 @@ function getSQLiteType(field) {
 }
 }
-function getInlineFK(table, obj) {
+export function getInlineFK(table, obj) {
 let fk = "";
 obj.references.forEach((r) => {
 if (fk !== "") return;
@@ -356,7 +356,7 @@ function getInlineFK(table, obj) {
 return fk;
 }
-function jsonToSQLite(obj) {
+export function jsonToSQLite(obj) {
 return obj.tables
 .map((table) => {
 const inlineFK = getInlineFK(table, obj);
@@ -409,7 +409,7 @@ function jsonToSQLite(obj) {
 .join("\n");
 }
-function jsonToMariaDB(obj) {
+export function jsonToMariaDB(obj) {
 return `${obj.tables
 .map(
 (table) =>
@@ -477,7 +477,7 @@ function jsonToMariaDB(obj) {
 .join("\n")}`;
 }
-function jsonToSQLServer(obj) {
+export function jsonToSQLServer(obj) {
 return `${obj.types
 .map((type) => {
 return `${
@@ -549,15 +549,15 @@ function jsonToSQLServer(obj) {
 .join("\n")}`;
 }
-function isSized(type) {
+export function isSized(type) {
 return ["CHAR", "VARCHAR", "BINARY", "VARBINARY", "TEXT"].includes(type);
 }
-function hasPrecision(type) {
+export function hasPrecision(type) {
 return ["DOUBLE", "NUMERIC", "DECIMAL", "FLOAT"].includes(type);
 }
-function hasCheck(type) {
+export function hasCheck(type) {
 return [
 "INT",
 "SMALLINT",
@@ -572,7 +572,7 @@ function hasCheck(type) {
 ].includes(type);
 }
-function getSize(type) {
+export function getSize(type) {
 switch (type) {
 case "CHAR":
 case "BINARY":
@@ -586,15 +586,3 @@ function getSize(type) {
 return "";
 }
 }
-export {
-jsonToMySQL,
-jsonToPostgreSQL,
-isSized,
-getSize,
-hasPrecision,
-hasCheck,
-jsonToSQLite,
-jsonToMariaDB,
-jsonToSQLServer,
-};
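
The removed list only exported the nine SQL generators and size/precision helpers; getJsonType, generateSchema, getTypeString, hasQuotes, getSQLiteType and getInlineFK were previously module-private. With export function on every declaration they all become part of the module's public surface, while existing imports keep resolving to the same bindings. A hedged import sketch (the file path is assumed):

import { jsonToMySQL, jsonToPostgreSQL, jsonToSQLite } from "./toSQL";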

View File

@@ -1,4 +1,4 @@
-function dataURItoBlob(dataUrl) {
+export function dataURItoBlob(dataUrl) {
 const byteString = atob(dataUrl.split(",")[1]);
 const mimeString = dataUrl.split(",")[0].split(":")[1].split(";")[0];
 const arrayBuffer = new ArrayBuffer(byteString.length);
@@ -11,8 +11,6 @@ function dataURItoBlob(dataUrl) {
 return new Blob([intArray], { type: mimeString });
 }
-function arrayIsEqual(arr1, arr2) {
+export function arrayIsEqual(arr1, arr2) {
 return JSON.stringify(arr1) === JSON.stringify(arr2);
 }
-export { dataURItoBlob, arrayIsEqual };

View File

@@ -1,12 +1,10 @@
 import { Validator } from "jsonschema";
 import { ddbSchema, jsonSchema } from "../data/schemas";
-function jsonDiagramIsValid(obj) {
+export function jsonDiagramIsValid(obj) {
 return new Validator().validate(obj, jsonSchema).valid;
 }
-function ddbDiagramIsValid(obj) {
+export function ddbDiagramIsValid(obj) {
 return new Validator().validate(obj, ddbSchema).valid;
 }
-export { jsonDiagramIsValid, ddbDiagramIsValid };
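
The validators already consume ddbSchema and jsonSchema as named imports, so their import line is untouched by the schema file's change. A minimal usage sketch; the import path is an assumption, and the object shape follows the required keys of jsonSchema shown earlier:

import { jsonDiagramIsValid } from "./validateSchema";

const ok = jsonDiagramIsValid({ tables: [], relationships: [], notes: [], subjectAreas: [] }); // boolean: does the object satisfy jsonSchema?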