Browse Source

add dynamic import of crawler classes from package modules

Germey 5 years ago
parent
commit
b94ee7b02c
5 changed files with 35 additions and 9 deletions
  1. 1 0
      Dockerfile
  2. 20 0
      deployment.yml
  3. 2 0
      docker-compose.yml
  4. 12 8
      proxypool/crawlers/__init__.py
  5. 0 1
      proxypool/setting.py

+ 1 - 0
Dockerfile

@@ -2,4 +2,5 @@ FROM python:3.6
 WORKDIR /app
 COPY . .
 RUN pip install -r requirements.txt
+VOLUME ["/app/proxypool"]
 CMD ["supervisord", "-c", "supervisord.conf"]

+ 20 - 0
deployment.yml

@@ -5,6 +5,19 @@ metadata:
   name: proxypool
 ---
 apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: proxypool
+  namespace: proxypool
+spec:
+  storageClassName: azure-file
+  accessModes:
+    - ReadWriteOnce
+  resources:
+    requests:
+      storage: 1Gi
+---
+apiVersion: v1
 items:
   - apiVersion: v1
     kind: Service
@@ -81,7 +94,14 @@ items:
               ports:
                 - containerPort: 5555
               resources: {}
+              volumeMounts:
+                - mountPath: "/app/proxypool"
+                  name: proxypool
           restartPolicy: Always
+          volumes:
+            - name: proxypool
+              persistentVolumeClaim:
+                claimName: proxypool
     status: {}
   - apiVersion: extensions/v1beta1
     kind: Deployment

+ 2 - 0
docker-compose.yml

@@ -14,5 +14,7 @@ services:
     ports:
       - "5555:5555"
     restart: always
+    volumes:
+      - /tmp/proxypool:/app/proxypool
     environment:
       REDIS_HOST: redis

+ 12 - 8
proxypool/crawlers/__init__.py

@@ -1,10 +1,14 @@
-from .daili66 import Daili66Crawler
-from .ip3366 import IP3366Crawler
-from .iphai import IPHaiCrawler
+import pkgutil
+from .base import BaseCrawler
+import inspect
 
 
-__all__ = [
-    Daili66Crawler,
-    IP3366Crawler,
-    IPHaiCrawler
-]
+# auto-discover every BaseCrawler subclass defined in this package's modules
+classes = []
+for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):
+    module = loader.find_module(module_name).load_module(module_name)  # NOTE(review): find_module/load_module are deprecated (removed in 3.12) — migrate to importlib
+    for attr_name, value in inspect.getmembers(module):
+        globals()[attr_name] = value
+        if inspect.isclass(value) and issubclass(value, BaseCrawler) and value is not BaseCrawler:
+            classes.append(value)
+__ALL__ = classes  # NOTE(review): __ALL__ is not Python's export list (that is __all__, a list of str) — confirm consumers before renaming

+ 0 - 1
proxypool/setting.py

@@ -2,7 +2,6 @@ import platform
 from os.path import dirname, abspath, join
 from environs import Env
 from loguru import logger
-
 from proxypool.utils.parse import parse_redis_connection_string