From d02cfcf94ee90f0c5b2cd666621b5962d1e23078 Mon Sep 17 00:00:00 2001
From: dbroqua
Date: Sat, 9 Apr 2022 00:00:04 +0200
Subject: [PATCH] {WIP} #37

---
 README.md                       |  22 ++++++
 docker-compose.yml.dev          |   2 +
 docker-compose.yml.prod         |   2 +
 package.json                    |   1 +
 src/app.js                      |  11 +--
 src/config/index.js             |   3 +
 src/libs/passport.js            |  25 +++++--
 src/middleware/Albums.js        |  25 +------
 src/middleware/Jobs.js          | 128 ++++++++++++++++++++++++++++++++
 src/models/{cron.js => jobs.js} |   6 +-
 src/routes/jobs.js              |  40 ++++++++++
 11 files changed, 231 insertions(+), 34 deletions(-)
 create mode 100644 src/middleware/Jobs.js
 rename src/models/{cron.js => jobs.js} (71%)
 create mode 100644 src/routes/jobs.js

diff --git a/README.md b/README.md
index 4e40c47..e05aa15 100644
--- a/README.md
+++ b/README.md
@@ -184,6 +184,26 @@ server {
 
 Une fois le vhost activé (lien symbolique dans le dossier site-enable) et nginx rechargé votre site sera alors accessible en https.
 
+### Jobs
+
+Par défaut toutes les images des albums sont affichées depuis Discogs. Cependant, avec le temps, les urls deviennent invalides. Pour éviter cela, lors de l'ajout d'un album à votre collection, un job est créé. Ce job a pour rôle de stocker les images sur un bucket S3.
+
+Pour lancer les jobs, il faut mettre en place une tâche cron qui sera exécutée toutes les heures (par exemple).
+
+Exemple de crontab :
+```crontab
+0 * * * * curl 'http://localhost:3001/jobs' \
+  -H 'JOBS_HEADER_KEY: JOBS_HEADER_VALUE' \
+  -H 'Accept: application/json'
+30 * * * * curl 'http://localhost:3001/jobs?state=ERROR' \
+  -H 'JOBS_HEADER_KEY: JOBS_HEADER_VALUE' \
+  -H 'Accept: application/json'
+```
+
+N'oubliez pas de remplacer `localhost:3001`, `JOBS_HEADER_KEY` et `JOBS_HEADER_VALUE` par les bonnes valeurs.
+
+La première ligne permet de parcourir tous les nouveaux jobs alors que la seconde permet de relancer les jobs en erreur (après 5 tentatives, le job est marqué comme définitivement perdu).
+
 ### Fichier .env {#env-file}
 
 Voici la liste des variables configurables :
@@ -204,6 +224,8 @@ S3_ENDPOINT # Url de l'instance aws (s3.fr-par.scw.cloud pour scaleway france par exemple)
 S3_SIGNATURE # Version de la signature AWS (s3v4 pour scaleway par exemple)
 S3_BASEFOLDER # Nom du sous dossier dans lequel seront mis les pochettes des albums
 S3_BUCKET # Nom du bucket
+JOBS_HEADER_KEY # Nom du header utilisé pour l'identification des tâches cron (par exemple musictopus)
+JOBS_HEADER_VALUE # Valeur de la clé
 ```
 
 ## Contributeurs
diff --git a/docker-compose.yml.dev b/docker-compose.yml.dev
index 2f7e80e..dbe38b9 100644
--- a/docker-compose.yml.dev
+++ b/docker-compose.yml.dev
@@ -34,6 +34,8 @@ services:
             S3_BUCKET: ${S3_BUCKET}
             S3_ENDPOINT: ${S3_ENDPOINT}
             S3_SIGNATURE: ${S3_SIGNATURE}
+            JOBS_HEADER_KEY: ${JOBS_HEADER_KEY}
+            JOBS_HEADER_VALUE: ${JOBS_HEADER_VALUE}
         networks:
             - musictopus
     musictopus-db:
diff --git a/docker-compose.yml.prod b/docker-compose.yml.prod
index 59823cb..c343531 100644
--- a/docker-compose.yml.prod
+++ b/docker-compose.yml.prod
@@ -34,6 +34,8 @@ services:
             S3_BUCKET: ${S3_BUCKET}
             S3_ENDPOINT: ${S3_ENDPOINT}
            S3_SIGNATURE: ${S3_SIGNATURE}
+            JOBS_HEADER_KEY: ${JOBS_HEADER_KEY}
+            JOBS_HEADER_VALUE: ${JOBS_HEADER_VALUE}
         networks:
             - musictopus
     musictopus-db:
diff --git a/package.json b/package.json
index d346f25..d574778 100644
--- a/package.json
+++ b/package.json
@@ -61,6 +61,7 @@
         "mongoose-unique-validator": "^3.0.0",
         "npm-run-all": "^4.1.5",
         "passport": "^0.5.2",
+        "passport-custom": "^1.1.1",
         "passport-http": "^0.3.0",
         "passport-local": "^1.0.0",
         "rimraf": "^3.0.2",
diff --git a/src/app.js b/src/app.js
index a3e80bd..c8a9574 100644
--- a/src/app.js
+++ b/src/app.js
@@ -7,6 +7,8 @@ import flash from "connect-flash";
 import session from "express-session";
 import MongoStore from "connect-mongo";
 
+import passportConfig from "./libs/passport";
+
 import config, { env, mongoDbUri, secret } from "./config";
 import { isXhr } from "./helpers";
 
@@ -15,15 +17,13 @@ import indexRouter from "./routes";
 import maCollectionRouter from "./routes/ma-collection";
 import collectionRouter from "./routes/collection";
 
+import importJobsRouter from "./routes/jobs";
+
 import importAlbumRouterApiV1 from "./routes/api/v1/albums";
 import importSearchRouterApiV1 from "./routes/api/v1/search";
 import importMeRouterApiV1 from "./routes/api/v1/me";
 
-// Mongoose schema init
-require("./models/users");
-require("./models/albums");
-
-require("./libs/passport")(passport);
+passportConfig(passport);
 
 mongoose
     .connect(mongoDbUri, { useNewUrlParser: true, useUnifiedTopology: true })
@@ -85,6 +85,7 @@ app.use(
 app.use("/", indexRouter);
 app.use("/ma-collection", maCollectionRouter);
 app.use("/collection", collectionRouter);
+app.use("/jobs", importJobsRouter);
 app.use("/api/v1/albums", importAlbumRouterApiV1);
 app.use("/api/v1/search", importSearchRouterApiV1);
 app.use("/api/v1/me", importMeRouterApiV1);
diff --git a/src/config/index.js b/src/config/index.js
index 8070698..17b341d 100644
--- a/src/config/index.js
+++ b/src/config/index.js
@@ -14,4 +14,7 @@
     s3Bucket: process.env.S3_BUCKET || "musictopus",
     s3Endpoint: process.env.S3_ENDPOINT || "s3.fr-par.scw.cloud",
     s3Signature: process.env.S3_SIGNATURE || "s3v4",
+    jobsHeaderKey: process.env.JOBS_HEADER_KEY || "musictopus",
+    jobsHeaderValue:
+        process.env.JOBS_HEADER_VALUE || "ooYee9xok7eigo2shiePohyoGh1eepew",
 };
diff --git a/src/libs/passport.js b/src/libs/passport.js
index af0ab0b..a3ed7fe 100644
--- a/src/libs/passport.js
+++ b/src/libs/passport.js
@@ -1,11 +1,13 @@
 /* eslint-disable func-names */
-const mongoose = require("mongoose");
-const LocalStrategy = require("passport-local").Strategy;
-const { BasicStrategy } = require("passport-http");
+import { Strategy as LocalStrategy } from "passport-local";
+import { BasicStrategy } from "passport-http";
+import { Strategy as CustomStrategy } from "passport-custom";
 
-const Users = mongoose.model("Users");
+import Users from "../models/users";
 
-module.exports = function (passport) {
+import { jobsHeaderKey, jobsHeaderValue } from "../config";
+
+export default (passport) => {
     passport.serializeUser((user, done) => {
         done(null, user);
     });
@@ -55,4 +57,17 @@
                 .catch(done);
         })
     );
+    passport.use(
+        "jobs",
+        new CustomStrategy((req, next) => {
+            const apiKey = req.headers[jobsHeaderKey];
+
+            if (apiKey === jobsHeaderValue) {
+                return next(null, {
+                    username: "jobs",
+                });
+            }
+            return next(null, false, "Oops! Identifiants incorrects");
+        })
+    );
 };
diff --git a/src/middleware/Albums.js b/src/middleware/Albums.js
index a0ead43..84f5c5d 100644
--- a/src/middleware/Albums.js
+++ b/src/middleware/Albums.js
@@ -1,11 +1,10 @@
-/* eslint-disable no-await-in-loop */
 import moment from "moment";
 
 import Pages from "./Pages";
 import Export from "./Export";
 
 import AlbumsModel from "../models/albums";
-import CronModel from "../models/cron";
+import JobsModel from "../models/jobs";
 import UsersModel from "../models/users";
 import ErrorEvent from "../libs/error";
 // import { uploadFromUrl } from "../libs/aws";
@@ -31,34 +30,18 @@ class Albums extends Pages {
             : null;
 
         delete data.id;
-        // INFO: {POC} Pour chaque image on récupère une version que l'on stocke localement
-        // Utiliser un cron qui check la librairie pour mettre à jour les urls des images
-        // Mettre en cron l'id du nouvel élément créé pour me pas parser toute la bibliothèque à chaque fois
-        // if (data.thumb) {
-        //     data.thumb = await uploadFromUrl(data.thumb);
-        //     data.thumbType = "local";
-        // }
-        // if (data.images && data.images.length > 0) {
-        //     for (let i = 0; i < data.images.length; i += 1) {
-        //         data.images[i].uri150 = await uploadFromUrl(
-        //             data.images[i].uri150
-        //         );
-        //         data.images[i].uri = await uploadFromUrl(data.images[i].uri);
-        //     }
-        // }
-
         const album = new AlbumsModel(data);
 
         await album.save();
 
-        const cronData = {
+        const jobData = {
             model: "Albums",
             id: album._id,
         };
 
-        const cron = new CronModel(cronData);
+        const job = new JobsModel(jobData);
 
-        cron.save();
+        job.save();
 
         return album;
     }
diff --git a/src/middleware/Jobs.js b/src/middleware/Jobs.js
new file mode 100644
index 0000000..ccb2cf9
--- /dev/null
+++ b/src/middleware/Jobs.js
@@ -0,0 +1,128 @@
+/* eslint-disable no-await-in-loop */
+import ErrorEvent from "../libs/error";
+import { uploadFromUrl } from "../libs/aws";
+import { getAlbumDetails } from "../helpers";
+
+import JobsModel from "../models/jobs";
+import AlbumsModel from "../models/albums";
+
+class Jobs {
+    /**
+     * Méthode permettant de télécharger toutes les images d'un album
+     * @param {ObjectId} itemId
+     */
+    static async importAlbumAssets(itemId) {
+        const album = await AlbumsModel.findById(itemId);
+
+        if (!album) {
+            throw new ErrorEvent(
+                404,
+                "Item non trouvé",
+                `L'album avec l'id ${itemId} n'existe plus dans la collection`
+            );
+        }
+
+        const item = await getAlbumDetails(album.discogsId);
+
+        if (!item) {
+            throw new ErrorEvent(
+                404,
+                "Erreur de communication",
+                "Erreur lors de la récupération des informations sur Discogs"
+            );
+        }
+
+        if (item.thumb) {
+            album.thumb = await uploadFromUrl(item.thumb);
+            album.thumbType = "local";
+        }
+        const { images } = item;
+        if (images && images.length > 0) {
+            for (let i = 0; i < images.length; i += 1) {
+                images[i].uri150 = await uploadFromUrl(images[i].uri150);
+                images[i].uri = await uploadFromUrl(images[i].uri);
+            }
+        }
+
+        album.images = images;
+
+        await album.save();
+
+        return true;
+    }
+
+    /**
+     * Point d'entrée
+     * @param {String} state
+     *
+     * @return {Object}
+     */
+    async run(state = "NEW") {
+        const job = await JobsModel.findOne({
+            state,
+            tries: {
+                $lte: 5,
+            },
+        });
+
+        if (!job) {
+            return { message: "All jobs done" };
+        }
+
+        job.state = "IN-PROGRESS";
+
+        await job.save();
+
+        try {
+            switch (job.model) {
+                case "Albums":
+                    await Jobs.importAlbumAssets(job.id);
+                    break;
+                default:
+                    throw new ErrorEvent(
+                        500,
+                        "Job inconnu",
+                        `Le job avec l'id ${job._id} n'est pas un job valide`
+                    );
+            }
+
+            job.state = "SUCCESS";
+
+            await job.save();
+
+            return this.run(state);
+        } catch (err) {
+            job.state = "ERROR";
+            job.lastTry = new Date();
+            job.lastErrorMessage = err.message;
+            job.tries += 1;
+
+            await job.save();
+
+            throw err;
+        }
+    }
+
+    /**
+     * Méthode permettant de créer tous les jobs
+     *
+     * @return {Object}
+     */
+    static async populate() {
+        const albums = await AlbumsModel.find();
+
+        for (let i = 0; i < albums.length; i += 1) {
+            const jobData = {
+                model: "Albums",
+                id: albums[i]._id,
+            };
+
+            const job = new JobsModel(jobData);
+            await job.save();
+        }
+
+        return { message: `${albums.length} jobs ajoutés à la file d'attente` };
+    }
+}
+
+export default Jobs;
diff --git a/src/models/cron.js b/src/models/jobs.js
similarity index 71%
rename from src/models/cron.js
rename to src/models/jobs.js
index 6e782cc..ba3727d 100644
--- a/src/models/cron.js
+++ b/src/models/jobs.js
@@ -2,13 +2,13 @@ import mongoose from "mongoose";
 
 const { Schema } = mongoose;
 
-const CronSchema = new mongoose.Schema(
+const JobSchema = new mongoose.Schema(
     {
         model: String,
         id: Schema.Types.ObjectId,
         state: {
             type: String,
-            enum: ["NEW", "ERROR", "SUCCESS"],
+            enum: ["NEW", "IN-PROGRESS", "ERROR", "SUCCESS"],
             default: "NEW",
         },
         lastTry: Date,
@@ -21,4 +21,4 @@
     { timestamps: true }
 );
 
-export default mongoose.model("Cron", CronSchema);
+export default mongoose.model("Jobs", JobSchema);
diff --git a/src/routes/jobs.js b/src/routes/jobs.js
new file mode 100644
index 0000000..6bac899
--- /dev/null
+++ b/src/routes/jobs.js
@@ -0,0 +1,40 @@
+import express from "express";
+import passport from "passport";
+
+import Jobs from "../middleware/Jobs";
+
+// eslint-disable-next-line new-cap
+const router = express.Router();
+
+router.route("/").get(
+    passport.authenticate(["jobs"], {
+        session: false,
+    }),
+    async (req, res, next) => {
+        try {
+            const job = new Jobs();
+            const data = await job.run(req.query.state);
+
+            return res.status(200).json(data).end();
+        } catch (err) {
+            return next(err);
+        }
+    }
+);
+
+router.route("/populate").get(
+    passport.authenticate(["jobs"], {
+        session: false,
+    }),
+    async (req, res, next) => {
+        try {
+            const data = await Jobs.populate();
+
+            return res.status(200).json(data).end();
+        } catch (err) {
+            return next(err);
+        }
+    }
+);
+
+export default router;
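
Pour tester la configuration des jobs sans attendre le cron, voici une esquisse en JavaScript, équivalente aux appels `curl` du README. Elle repose sur des hypothèses : serveur local sur `localhost:3001`, valeurs par défaut de `src/config/index.js` (en-tête `musictopus`) et Node 18+ pour `fetch`. À noter qu'Express expose les noms d'en-têtes en minuscules dans `req.headers` : la valeur de `JOBS_HEADER_KEY` doit donc être en minuscules pour être reconnue par la stratégie `jobs` de `src/libs/passport.js`.

```js
// Esquisse : déclencher les jobs manuellement, comme le ferait la tâche cron du README.
// Hypothèses : serveur sur localhost:3001 et valeurs par défaut de src/config/index.js.
const JOBS_HEADER_KEY = process.env.JOBS_HEADER_KEY || "musictopus";
const JOBS_HEADER_VALUE =
    process.env.JOBS_HEADER_VALUE || "ooYee9xok7eigo2shiePohyoGh1eepew";

async function runJobs(state = "NEW") {
    // Node 18+ : fetch est disponible globalement.
    const res = await fetch(`http://localhost:3001/jobs?state=${state}`, {
        headers: {
            // Express normalise les noms d'en-têtes en minuscules :
            // la clé doit donc être configurée en minuscules.
            [JOBS_HEADER_KEY]: JOBS_HEADER_VALUE,
            Accept: "application/json",
        },
    });

    if (!res.ok) {
        throw new Error(`Échec des jobs : HTTP ${res.status}`);
    }

    // La route /jobs répond par exemple { message: "All jobs done" }.
    return res.json();
}

// Première passe sur les nouveaux jobs, puis relance des jobs en erreur,
// comme les deux lignes du crontab.
runJobs("NEW")
    .then(() => runJobs("ERROR"))
    .then(console.log)
    .catch(console.error);
```

Comme pour le crontab, remplacez l'hôte et les valeurs d'en-tête par celles de votre instance.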