Skip to content

Commit

Permalink
♻️ refactor: upload process
Browse files Browse the repository at this point in the history
  • Loading branch information
david-vaclavek committed Apr 17, 2024
1 parent 2570424 commit 8902c05
Show file tree
Hide file tree
Showing 3 changed files with 46 additions and 85 deletions.
17 changes: 11 additions & 6 deletions server/lifecycles/upload-event-entry-to-localazy-hook.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,20 @@ module.exports = async (event) => {
.plugin("localazy")
.service("localazyUploadService");

const chunks = LocalazyUploadService.splitToChunks(pickedFlatten);
const importFile = LocalazyUploadService.createImportFileRepresentation(
config.LOCALAZY_DEFAULT_FILE_NAME,
config.LOCALAZY_DEFAULT_FILE_PATH,
config.LOCALAZY_DEFAULT_FILE_EXTENSION,
eventEntryLocale,
chunks
pickedFlatten
);

const result = await LocalazyUploadService.upload(importFile);
const uploadConfig = {
contentOptions: {
type: config.LOCALAZY_DEFAULT_FILE_EXTENSION,
},
fileOptions: {
name: config.LOCALAZY_DEFAULT_FILE_NAME,
path: config.LOCALAZY_DEFAULT_FILE_PATH,
}
};
const result = await LocalazyUploadService.upload(importFile, uploadConfig);
return result;
}
20 changes: 12 additions & 8 deletions server/services/localazy-transfer-upload-service.js
Original file line number Diff line number Diff line change
Expand Up @@ -146,17 +146,21 @@ module.exports = ({ strapi }) => ({
? isoStrapiToLocalazy(defaultLocale.code)
: config.LOCALAZY_DEFAULT_LOCALE;

const chunks = LocalazyUploadService.splitToChunks(flattenContent);
const importFile = LocalazyUploadService.createImportFileRepresentation(
config.LOCALAZY_DEFAULT_FILE_NAME,
config.LOCALAZY_DEFAULT_FILE_PATH,
config.LOCALAZY_DEFAULT_FILE_EXTENSION,
locale,
chunks
flattenContent
);
// Use `deprecate: "file"` if there is one chunk of transferred data only!
const hasMoreTransferFilesChunks = importFile.length > 1;
const uploadConfig = !hasMoreTransferFilesChunks ? { deprecate: "file" } : {};
// Use `deprecate: "file"` if there is one chunk of transferred data only (99900 keys)!
const uploadConfig = {
contentOptions: {
type: config.LOCALAZY_DEFAULT_FILE_EXTENSION,
},
i18nOptions: { deprecate: "file" },
fileOptions: {
name: config.LOCALAZY_DEFAULT_FILE_NAME,
path: config.LOCALAZY_DEFAULT_FILE_PATH,
}
};
await JobNotificationService.emit(UPLOAD_EVENT, {
message: "Uploading collections to Localazy...",
});
Expand Down
94 changes: 23 additions & 71 deletions server/services/localazy-upload-service.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,31 +12,24 @@ module.exports = ({ strapi }) => ({
* Use config to adjust the upload process.
* Returns status and id of a last chunk
*/
async upload(files, config = {}) {
let ret = {
success: false,
message: "No data was uploaded",
};
async upload(file, config = {}) {
try {
for (const file of files) {
const LocalazyApi = await localazyApiClientFactory();
const user = await strapi
.plugin("localazy")
.service("localazyUserService")
.getUser();
const LocalazyApi = await localazyApiClientFactory();
const user = await strapi
.plugin("localazy")
.service("localazyUserService")
.getUser();

const result = await LocalazyApi.import.json({
project: user.project.id,
json: file,
...config,
});

const result = await LocalazyApi.import({
projectId: user.project.id,
files: file,
...config,
});
await delay();
ret = {
success: true,
result: result.result,
};
}
return ret;
return {
success: true,
result,
};
} catch (e) {
strapi.log.error(e);
return {
Expand All @@ -46,57 +39,16 @@ module.exports = ({ strapi }) => ({
}
},

/**
 * Maximum number of keys included in a single upload chunk.
 * Accounts with lifted Localazy public-API limits may send 99900 keys per
 * request; all other accounts are capped at 9990.
 * NOTE(review): the original comment said "Check Directus for the Lifted
 * Limits OAuth Apps IDs", but this is a Strapi plugin — presumably copied
 * from a sibling integration; confirm where the lifted-limits app IDs
 * actually live.
 */
CHUNK_LIMIT: config.LOCALAZY_PUBLIC_API_LIFTED_LIMITS ? 99900 : 9990,

splitToChunks(data, CHUNK_LIMIT = null) {
const chunks = [];
const keys = Object.keys(data);
const keysCount = keys.length;
const localChunkLimit = CHUNK_LIMIT || this.CHUNK_LIMIT;
const chunksCount = Math.ceil(keysCount / localChunkLimit);
for (let i = 0; i < chunksCount; i += 1) {
const chunkStrings = {};
const from = localChunkLimit * i;
const to = localChunkLimit * (i + 1);

const currentKeys = keys.slice(from, to);
currentKeys.forEach((key) => {
chunkStrings[key] = data[key];
});
chunks.push(chunkStrings);
}

return chunks;
},

createImportFileRepresentation(
filename,
path,
type,
sourceLang,
stringsChunks
strings
) {
const files = [];

for (const strings of stringsChunks) {
const file = [
{
name: filename,
path,
content: {
type,
[sourceLang]: {
...strings,
},
},
},
];
files.push(file);
}
const file = {
[sourceLang]: {
...strings,
},
};

return files;
return file;
},
});

0 comments on commit 8902c05

Please sign in to comment.