{WIP} #37
This commit is contained in:
parent
e58c6af71d
commit
d02cfcf94e
11 changed files with 231 additions and 34 deletions
22
README.md
22
README.md
|
@ -184,6 +184,26 @@ server {
|
|||
|
||||
Une fois le vhost activé (lien symbolique dans le dossier sites-enabled) et nginx rechargé, votre site sera alors accessible en https.
|
||||
|
||||
### Jobs
|
||||
|
||||
Par défaut, toutes les images des albums sont affichées depuis Discogs. Cependant, avec le temps, les URL deviennent invalides. Pour éviter cela, lors de l'ajout d'un album à votre collection, un job est créé. Ce job a pour rôle de stocker les images sur un bucket S3.
|
||||
|
||||
Pour lancer les jobs, il faut mettre en place une tâche cron qui sera exécutée toutes les heures (par exemple).
|
||||
|
||||
Exemple de crontab :
|
||||
```crontab
|
||||
0 * * * * curl 'http://localhost:3001/jobs' \
|
||||
-H 'JOBS_HEADER_KEY: JOBS_HEADER_VALUE' \
|
||||
-H 'Accept: application/json'
|
||||
30 * * * * curl 'http://localhost:3001/jobs?state=ERROR' \
|
||||
-H 'JOBS_HEADER_KEY: JOBS_HEADER_VALUE' \
|
||||
-H 'Accept: application/json'
|
||||
```
|
||||
|
||||
N'oubliez pas de remplacer `localhost:3001`, `JOBS_HEADER_KEY` et `JOBS_HEADER_VALUE` par les bonnes valeurs.
|
||||
|
||||
La première ligne permet de parcourir tous les nouveaux jobs alors que la seconde permet de relancer les jobs en erreur (après 5 tentatives, le job est marqué comme définitivement perdu).
|
||||
|
||||
### Fichier .env {#env-file}
|
||||
|
||||
Voici la liste des variables configurables :
|
||||
|
@ -204,6 +224,8 @@ S3_ENDPOINT # Url de l'instance aws (s3.fr-par.scw.cloud pour scaleway france pa
|
|||
S3_SIGNATURE # Version de la signature AWS (s3v4 pour scaleway par exemple)
|
||||
S3_BASEFOLDER # Nom du sous dossier dans lequel seront mis les pochettes des albums
|
||||
S3_BUCKET # Nom du bucket
|
||||
JOBS_HEADER_KEY # Nom du header utilisé pour l'identification des tâches cron (par exemple musictopus)
|
||||
JOBS_HEADER_VALUE # Valeur de la clé
|
||||
```
|
||||
|
||||
## Contributeurs
|
||||
|
|
|
@ -34,6 +34,8 @@ services:
|
|||
S3_BUCKET: ${S3_BUCKET}
|
||||
S3_ENDPOINT: ${S3_ENDPOINT}
|
||||
S3_SIGNATURE: ${S3_SIGNATURE}
|
||||
JOBS_HEADER_KEY: ${JOBS_HEADER_KEY}
|
||||
JOBS_HEADER_VALUE: ${JOBS_HEADER_VALUE}
|
||||
networks:
|
||||
- musictopus
|
||||
musictopus-db:
|
||||
|
|
|
@ -34,6 +34,8 @@ services:
|
|||
S3_BUCKET: ${S3_BUCKET}
|
||||
S3_ENDPOINT: ${S3_ENDPOINT}
|
||||
S3_SIGNATURE: ${S3_SIGNATURE}
|
||||
JOBS_HEADER_KEY: ${JOBS_HEADER_KEY}
|
||||
JOBS_HEADER_VALUE: ${JOBS_HEADER_VALUE}
|
||||
networks:
|
||||
- musictopus
|
||||
musictopus-db:
|
||||
|
|
|
@ -61,6 +61,7 @@
|
|||
"mongoose-unique-validator": "^3.0.0",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"passport": "^0.5.2",
|
||||
"passport-custom": "^1.1.1",
|
||||
"passport-http": "^0.3.0",
|
||||
"passport-local": "^1.0.0",
|
||||
"rimraf": "^3.0.2",
|
||||
|
|
11
src/app.js
11
src/app.js
|
@ -7,6 +7,8 @@ import flash from "connect-flash";
|
|||
import session from "express-session";
|
||||
import MongoStore from "connect-mongo";
|
||||
|
||||
import passportConfig from "./libs/passport";
|
||||
|
||||
import config, { env, mongoDbUri, secret } from "./config";
|
||||
|
||||
import { isXhr } from "./helpers";
|
||||
|
@ -15,15 +17,13 @@ import indexRouter from "./routes";
|
|||
import maCollectionRouter from "./routes/ma-collection";
|
||||
import collectionRouter from "./routes/collection";
|
||||
|
||||
import importJobsRouter from "./routes/jobs";
|
||||
|
||||
import importAlbumRouterApiV1 from "./routes/api/v1/albums";
|
||||
import importSearchRouterApiV1 from "./routes/api/v1/search";
|
||||
import importMeRouterApiV1 from "./routes/api/v1/me";
|
||||
|
||||
// Mongoose schema init
|
||||
require("./models/users");
|
||||
require("./models/albums");
|
||||
|
||||
require("./libs/passport")(passport);
|
||||
passportConfig(passport);
|
||||
|
||||
mongoose
|
||||
.connect(mongoDbUri, { useNewUrlParser: true, useUnifiedTopology: true })
|
||||
|
@ -85,6 +85,7 @@ app.use(
|
|||
app.use("/", indexRouter);
|
||||
app.use("/ma-collection", maCollectionRouter);
|
||||
app.use("/collection", collectionRouter);
|
||||
app.use("/jobs", importJobsRouter);
|
||||
app.use("/api/v1/albums", importAlbumRouterApiV1);
|
||||
app.use("/api/v1/search", importSearchRouterApiV1);
|
||||
app.use("/api/v1/me", importMeRouterApiV1);
|
||||
|
|
|
@ -14,4 +14,7 @@ module.exports = {
|
|||
s3Bucket: process.env.S3_BUCKET || "musictopus",
|
||||
s3Endpoint: process.env.S3_ENDPOINT || "s3.fr-par.scw.cloud",
|
||||
s3Signature: process.env.S3_SIGNATURE || "s3v4",
|
||||
jobsHeaderKey: process.env.JOBS_HEADER_KEY || "musictopus",
|
||||
jobsHeaderValue:
|
||||
process.env.JOBS_HEADER_VALUE || "ooYee9xok7eigo2shiePohyoGh1eepew",
|
||||
};
|
||||
|
|
|
@ -1,11 +1,13 @@
|
|||
/* eslint-disable func-names */
|
||||
const mongoose = require("mongoose");
|
||||
const LocalStrategy = require("passport-local").Strategy;
|
||||
const { BasicStrategy } = require("passport-http");
|
||||
import { Strategy as LocalStrategy } from "passport-local";
|
||||
import { BasicStrategy } from "passport-http";
|
||||
import { Strategy as CustomStrategy } from "passport-custom";
|
||||
|
||||
const Users = mongoose.model("Users");
|
||||
import Users from "../models/users";
|
||||
|
||||
module.exports = function (passport) {
|
||||
import { jobsHeaderKey, jobsHeaderValue } from "../config";
|
||||
|
||||
export default (passport) => {
|
||||
passport.serializeUser((user, done) => {
|
||||
done(null, user);
|
||||
});
|
||||
|
@ -55,4 +57,17 @@ module.exports = function (passport) {
|
|||
.catch(done);
|
||||
})
|
||||
);
|
||||
passport.use(
|
||||
"jobs",
|
||||
new CustomStrategy((req, next) => {
|
||||
const apiKey = req.headers[jobsHeaderKey];
|
||||
|
||||
if (apiKey === jobsHeaderValue) {
|
||||
return next(null, {
|
||||
username: "jobs",
|
||||
});
|
||||
}
|
||||
return next(null, false, "Oops! Identifiants incorrects");
|
||||
})
|
||||
);
|
||||
};
|
||||
|
|
|
@ -1,11 +1,10 @@
|
|||
/* eslint-disable no-await-in-loop */
|
||||
import moment from "moment";
|
||||
|
||||
import Pages from "./Pages";
|
||||
import Export from "./Export";
|
||||
|
||||
import AlbumsModel from "../models/albums";
|
||||
import CronModel from "../models/cron";
|
||||
import JobsModel from "../models/jobs";
|
||||
import UsersModel from "../models/users";
|
||||
import ErrorEvent from "../libs/error";
|
||||
// import { uploadFromUrl } from "../libs/aws";
|
||||
|
@ -31,34 +30,18 @@ class Albums extends Pages {
|
|||
: null;
|
||||
delete data.id;
|
||||
|
||||
// INFO: {POC} Pour chaque image on récupère une version que l'on stocke localement
|
||||
// Utiliser un cron qui check la librairie pour mettre à jour les urls des images
|
||||
// Mettre en cron l'id du nouvel élément créé pour me pas parser toute la bibliothèque à chaque fois
|
||||
// if (data.thumb) {
|
||||
// data.thumb = await uploadFromUrl(data.thumb);
|
||||
// data.thumbType = "local";
|
||||
// }
|
||||
// if (data.images && data.images.length > 0) {
|
||||
// for (let i = 0; i < data.images.length; i += 1) {
|
||||
// data.images[i].uri150 = await uploadFromUrl(
|
||||
// data.images[i].uri150
|
||||
// );
|
||||
// data.images[i].uri = await uploadFromUrl(data.images[i].uri);
|
||||
// }
|
||||
// }
|
||||
|
||||
const album = new AlbumsModel(data);
|
||||
|
||||
await album.save();
|
||||
|
||||
const cronData = {
|
||||
const jobData = {
|
||||
model: "Albums",
|
||||
id: album._id,
|
||||
};
|
||||
|
||||
const cron = new CronModel(cronData);
|
||||
const job = new JobsModel(jobData);
|
||||
|
||||
cron.save();
|
||||
job.save();
|
||||
|
||||
return album;
|
||||
}
|
||||
|
|
128
src/middleware/Jobs.js
Normal file
128
src/middleware/Jobs.js
Normal file
|
@ -0,0 +1,128 @@
|
|||
/* eslint-disable no-await-in-loop */
|
||||
import ErrorEvent from "../libs/error";
|
||||
import { uploadFromUrl } from "../libs/aws";
|
||||
import { getAlbumDetails } from "../helpers";
|
||||
|
||||
import JobsModel from "../models/jobs";
|
||||
import AlbumsModel from "../models/albums";
|
||||
|
||||
class Jobs {
    /**
     * Download every image of an album and store it on the S3 bucket,
     * replacing the (expiring) Discogs URLs on the stored album.
     *
     * @param {ObjectId} itemId - _id of the album in the local collection
     *
     * @return {Boolean} true once every asset has been imported
     * @throws {ErrorEvent} 404 when the album no longer exists locally, or
     *                      when its details can no longer be fetched from Discogs
     */
    static async importAlbumAssets(itemId) {
        const album = await AlbumsModel.findById(itemId);

        if (!album) {
            throw new ErrorEvent(
                404,
                "Item non trouvé",
                `L'album ayant l'id ${itemId} n'existe plus dans la collection`
            );
        }

        const item = await getAlbumDetails(album.discogsId);

        if (!item) {
            throw new ErrorEvent(
                404,
                "Erreur de communication",
                "Erreur lors de la récupération des informations sur Discogs"
            );
        }

        if (item.thumb) {
            album.thumb = await uploadFromUrl(item.thumb);
            album.thumbType = "local";
        }
        const { images } = item;
        if (images && images.length > 0) {
            // Sequential on purpose (file disables no-await-in-loop):
            // avoids hammering Discogs/S3 with parallel transfers.
            for (let i = 0; i < images.length; i += 1) {
                images[i].uri150 = await uploadFromUrl(images[i].uri150);
                images[i].uri = await uploadFromUrl(images[i].uri);
            }
        }

        album.images = images;

        await album.save();

        return true;
    }

    /**
     * Entry point: process queued jobs one at a time (recursively) until no
     * job with the requested state remains.
     *
     * A job is retried while `tries <= 5`; on failure its state becomes
     * "ERROR" with the error message recorded, and the error is rethrown
     * (the /jobs?state=ERROR cron picks failed jobs up again).
     *
     * @param {String} state - job state to process ("NEW" by default)
     *
     * @return {Object} summary message once the queue is empty
     */
    async run(state = "NEW") {
        const job = await JobsModel.findOne({
            state,
            tries: {
                $lte: 5,
            },
        });

        if (!job) {
            return { message: "All jobs done" };
        }

        // Mark the job as taken so a concurrent run does not pick it up.
        job.state = "IN-PROGRESS";

        await job.save();

        try {
            switch (job.model) {
                case "Albums":
                    await Jobs.importAlbumAssets(job.id);
                    break;
                default:
                    throw new ErrorEvent(
                        500,
                        "Job inconnu",
                        `Le job avec l'id ${job._id} n'est pas un job valide`
                    );
            }

            job.state = "SUCCESS";

            await job.save();

            // Recurse to drain the remaining jobs with the same state.
            return this.run(state);
        } catch (err) {
            job.state = "ERROR";
            job.lastTry = new Date();
            job.lastErrorMessage = err.message;
            job.tries += 1;

            await job.save();

            throw err;
        }
    }

    /**
     * Create one job per album already in the collection (used to backfill
     * the queue after enabling the jobs feature).
     *
     * @return {Object} message with the number of queued jobs
     */
    static async populate() {
        const albums = await AlbumsModel.find();

        for (let i = 0; i < albums.length; i += 1) {
            const jobData = {
                model: "Albums",
                id: albums[i]._id,
            };

            const job = new JobsModel(jobData);
            await job.save();
        }

        return { message: `${albums.length} jobs ajoutés à la file d'attente` };
    }
}

export default Jobs;
|
|
@ -2,13 +2,13 @@ import mongoose from "mongoose";
|
|||
|
||||
const { Schema } = mongoose;
|
||||
|
||||
const CronSchema = new mongoose.Schema(
|
||||
const JobSchema = new mongoose.Schema(
|
||||
{
|
||||
model: String,
|
||||
id: Schema.Types.ObjectId,
|
||||
state: {
|
||||
type: String,
|
||||
enum: ["NEW", "ERROR", "SUCCESS"],
|
||||
enum: ["NEW", "IN-PROGRESS", "ERROR", "SUCCESS"],
|
||||
default: "NEW",
|
||||
},
|
||||
lastTry: Date,
|
||||
|
@ -21,4 +21,4 @@ const CronSchema = new mongoose.Schema(
|
|||
{ timestamps: true }
|
||||
);
|
||||
|
||||
export default mongoose.model("Cron", CronSchema);
|
||||
export default mongoose.model("Jobs", JobSchema);
|
40
src/routes/jobs.js
Normal file
40
src/routes/jobs.js
Normal file
|
@ -0,0 +1,40 @@
|
|||
import express from "express";
|
||||
import passport from "passport";
|
||||
|
||||
import Jobs from "../middleware/Jobs";
|
||||
|
||||
// eslint-disable-next-line new-cap
const router = express.Router();

// Both endpoints are protected by the "jobs" custom passport strategy
// (shared secret header), with no session persisted.
const requireJobsAuth = passport.authenticate(["jobs"], {
    session: false,
});

/**
 * GET /jobs — drain the job queue, optionally filtered by ?state=.
 */
router.get("/", requireJobsAuth, async (req, res, next) => {
    try {
        const runner = new Jobs();
        const result = await runner.run(req.query.state);

        return res.status(200).json(result).end();
    } catch (err) {
        return next(err);
    }
});

/**
 * GET /jobs/populate — queue one job per album already in the collection.
 */
router.get("/populate", requireJobsAuth, async (req, res, next) => {
    try {
        const result = await Jobs.populate();

        return res.status(200).json(result).end();
    } catch (err) {
        return next(err);
    }
});

export default router;
|
Loading…
Reference in a new issue