
Add readme for top level folder, clean up dead code

Alex Tran 3 years ago
parent
commit 613b4449a7

+ 33 - 0
README.md

@@ -0,0 +1,33 @@
+# IMMICH
+
+Self-hosted photo backup solution directly from your mobile phone.
+
+# Development
+
+You can use Docker Compose for development. Immich is composed of several services:
+
+1. The server
+2. PostgreSQL
+3. Redis
+
+## Populate the .env file
+
+Navigate to the `server` directory and run
+
+```bash
+cp .env.example .env
+```
+
+Then populate the values in it.
+
+To start the services, run the following from the `server` directory:
+
+```bash
+docker-compose up
+```
+
+To force a rebuild of the node modules after installing new packages, run:
+
+```bash
+docker-compose up --build -V
+```
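
The "populate the values" step above maps onto the variables that `server/src/config/app.config.ts` (also touched in this commit, see below) validates at startup. A minimal sketch of such a `.env`, with purely illustrative values:

```
NODE_ENV=development
DB_USERNAME=postgres
DB_PASSWORD=postgres
DB_DATABASE_NAME=immich
UPLOAD_LOCATION=./upload
JWT_SECRET=change-me
```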

+ 13 - 0
server/Dockerfile

@@ -29,6 +29,12 @@ COPY . .
 
 RUN yarn build
 
+# Clean up commands
+RUN apt-get autoremove -y && apt-get clean && \
+  rm -rf /usr/local/src/*
+
+RUN apt-get clean && \
+  rm -rf /var/lib/apt/lists/*
 
 FROM ubuntu:20.04 as production
 ARG DEBIAN_FRONTEND=noninteractive
@@ -62,4 +68,11 @@ COPY . .
 
 COPY --from=development /usr/src/app/dist ./dist
 
+# Clean up commands
+RUN apt-get autoremove -y && apt-get clean && \
+  rm -rf /usr/local/src/*
+
+RUN apt-get clean && \
+  rm -rf /var/lib/apt/lists/*
+
 CMD ["node", "dist/main"]

+ 0 - 12
server/README.md

@@ -10,16 +10,4 @@ There is a tensorflow module running in the server so some package will be neede
 $ apt-get install make cmake gcc g++
 ```
 
-# Docker
 
-To run application using docker compose
-
-```bash
-docker-compose up
-```
-
-To force rebuild node module after installing new packages
-
-```bash
-docker-compose up --build -V
-```

+ 0 - 4
server/src/config/app.config.ts

@@ -6,14 +6,10 @@ export const immichAppConfig: ConfigModuleOptions = {
   isGlobal: true,
   validationSchema: Joi.object({
     NODE_ENV: Joi.string().required().valid('development', 'production', 'staging').default('development'),
-    // DB_HOST: Joi.string().required(),
     DB_USERNAME: Joi.string().required(),
     DB_PASSWORD: Joi.string().required(),
     DB_DATABASE_NAME: Joi.string().required(),
     UPLOAD_LOCATION: Joi.string().required(),
     JWT_SECRET: Joi.string().required(),
-    // REDIS_HOST: Joi.string().required(),
-    // REDIS_PORT: Joi.string().required(),
-    // REDIS_PASSWORD: Joi.string().required(),
   }),
 };
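
The schema above validates these variables when the app boots; at runtime they are read through `@nestjs/config`'s `ConfigService`, the same service injected into `MachineLearningProcessor` further down. A minimal sketch of consuming one of the validated values (the `ExampleService` name is hypothetical):

```typescript
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';

@Injectable()
export class ExampleService {
  constructor(private configService: ConfigService) {}

  uploadLocation(): string | undefined {
    // UPLOAD_LOCATION is required by the Joi schema above,
    // so validation guarantees it is set when the app boots.
    return this.configService.get<string>('UPLOAD_LOCATION');
  }
}
```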

+ 2 - 2
server/src/modules/image-optimize/image-optimize.module.ts

@@ -7,7 +7,7 @@ import { AssetService } from '../../api-v1/asset/asset.service';
 import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
 import { ImageOptimizeProcessor } from './image-optimize.processor';
 import { ImageOptimizeService } from './image-optimize.service';
-// import { MachineLearningProcessor } from './machine-learning.processor';
+import { MachineLearningProcessor } from './machine-learning.processor';
 
 @Module({
   imports: [
@@ -30,7 +30,7 @@ import { ImageOptimizeService } from './image-optimize.service';
 
     TypeOrmModule.forFeature([AssetEntity]),
   ],
-  providers: [ImageOptimizeService, ImageOptimizeProcessor],
+  providers: [ImageOptimizeService, ImageOptimizeProcessor, MachineLearningProcessor],
   exports: [ImageOptimizeService],
 })
 export class ImageOptimizeModule {}

+ 8 - 7
server/src/modules/image-optimize/image-optimize.processor.ts

@@ -45,13 +45,14 @@ export class ImageOptimizeProcessor {
 
           await this.assetRepository.update(savedAsset, { resizePath: resizePath });
 
-          const jobb = await this.machineLearningQueue.add(
-            'object-detection',
-            {
-              resizePath,
-            },
-            { jobId: randomUUID() },
-          );
+          // Send file to object detection after resizing
+          // const detectionJob = await this.machineLearningQueue.add(
+          //   'object-detection',
+          //   {
+          //     resizePath,
+          //   },
+          //   { jobId: randomUUID() },
+          // );
         });
     });
 

+ 34 - 35
server/src/modules/image-optimize/machine-learning.processor.ts

@@ -1,39 +1,38 @@
-// import { Process, Processor } from '@nestjs/bull';
-// import { InjectRepository } from '@nestjs/typeorm';
-// import { Job } from 'bull';
-// import { Repository } from 'typeorm';
-// import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
-// import sharp from 'sharp';
-// import fs, { existsSync, mkdirSync } from 'fs';
-// import { ConfigService } from '@nestjs/config';
-// import * as tfnode from '@tensorflow/tfjs-node';
-// import * as cocoSsd from '@tensorflow-models/coco-ssd';
+import { Process, Processor } from '@nestjs/bull';
+import { InjectRepository } from '@nestjs/typeorm';
+import { Job } from 'bull';
+import { Repository } from 'typeorm';
+import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
+import fs from 'fs';
+import { ConfigService } from '@nestjs/config';
+import * as tfnode from '@tensorflow/tfjs-node';
+import * as cocoSsd from '@tensorflow-models/coco-ssd';
 
-// @Processor('machine-learning')
-// export class MachineLearningProcessor {
-//   constructor(
-//     @InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
-//     private configService: ConfigService,
-//   ) {}
+@Processor('machine-learning')
+export class MachineLearningProcessor {
+  constructor(
+    @InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
+    private configService: ConfigService,
+  ) {}
 
-//   @Process('object-detection')
-//   async handleOptimization(job: Job) {
-//     try {
-//       const { resizePath }: { resizePath: string } = job.data;
+  @Process('object-detection')
+  async handleOptimization(job: Job) {
+    try {
+      const { resizePath }: { resizePath: string } = job.data;
 
-//       const image = fs.readFileSync(resizePath);
-//       const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
-//       const model = await cocoSsd.load();
-//       const predictions = await model.detect(decodedImage);
-//       console.log('start predictions ------------------ ');
-//       for (var result of predictions) {
-//         console.log(`Found ${result.class} with score ${result.score}`);
-//       }
-//       console.log('end predictions ------------------ ');
+      const image = fs.readFileSync(resizePath);
+      const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
+      const model = await cocoSsd.load();
+      const predictions = await model.detect(decodedImage);
+      console.log('\n\nstart predictions ------------------ ');
+      for (var result of predictions) {
+        console.log(`Found ${result.class} with score ${result.score}`);
+      }
+      console.log('end predictions ------------------\n\n');
 
-//       return 'ok';
-//     } catch (e) {
-//       console.log('Error object detection ', e);
-//     }
-//   }
-// }
+      return 'ok';
+    } catch (e) {
+      console.log('Error object detection ', e);
+    }
+  }
+}
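
For context on how this processor gets work: its only producer in this diff is the `machineLearningQueue.add` call commented out in `image-optimize.processor.ts` above. A minimal sketch of what re-enabling that producer could look like (class and method names here are illustrative; the queue name, job name, payload, and `jobId` usage are taken from the code shown in this commit):

```typescript
import { InjectQueue } from '@nestjs/bull';
import { Queue } from 'bull';
import { randomUUID } from 'crypto';

export class ObjectDetectionDispatcher {
  // 'machine-learning' matches the @Processor('machine-learning') decorator above.
  constructor(@InjectQueue('machine-learning') private machineLearningQueue: Queue) {}

  async dispatch(resizePath: string) {
    // 'object-detection' and { resizePath } match what the @Process('object-detection')
    // handler in MachineLearningProcessor expects to receive.
    await this.machineLearningQueue.add('object-detection', { resizePath }, { jobId: randomUUID() });
  }
}
```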