Skip to content

Commit

Permalink
fix: Solve minor error
Browse files Browse the repository at this point in the history
  • Loading branch information
GermanMT committed Oct 23, 2023
1 parent 41791c3 commit 5b91874
Show file tree
Hide file tree
Showing 5 changed files with 14 additions and 28 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ Currently the only way to consume NVD data is through a public [API](https://nvd

## How does it work?

This repository solves this problem by creating an automatic clone on a MongoDB database that the developer has already initialised. This command also creates an index for each native id extracted from NVD (cve:id, cpe_match:matchCriteriaId and cpe:cpeNameId). This is done using the command:
This repository solves this problem by creating an automatic clone on a MongoDB database that the developer has already initialised, in approximately 30 minutes. This command also creates an index for each native id extracted from NVD (cve:id, cpe_match:matchCriteriaId and cpe:cpeNameId). This is done using the command:

```
python3 main.py clone [your_mongodb_uri] --nvd_api [your_nvd_api_key]
Expand Down
13 changes: 1 addition & 12 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,32 +4,24 @@
from asyncio import run
from src import clone_cves, clone_cpe_matchs, clone_cpes, nvd_sync

import time


@command()
@argument('mongodb_uri')
@option('--nvd_api', default='')
async def clone(mongodb_uri, nvd_api):
headers = {'apiKey': nvd_api}

client = AsyncIOMotorClient(mongodb_uri)

dbs = await client.list_databases()
while dbs.alive:
db = await dbs.next()
if db['name'] == 'nvd':
raise Exception('Database have been ready cloned. Delete it or run updater command.')

raise Exception('NVD database have been already cloned. Delete it or run sync command.')
if nvd_api:
delay = 1
else:
delay = 6

await clone_cves(client, delay, headers)

await clone_cpe_matchs(client, delay, headers)

await clone_cpes(client, delay, headers)


Expand All @@ -38,14 +30,11 @@ async def clone(mongodb_uri, nvd_api):
@option('--nvd_api', default='')
async def sync(mongodb_uri, nvd_api):
headers = {'apiKey': nvd_api}

client = AsyncIOMotorClient(mongodb_uri)

if nvd_api:
delay = 1.0
else:
delay = 6.0

scheduler = BackgroundScheduler()
scheduler.add_job(nvd_sync, 'interval', args=[client, headers, delay], seconds=7200)
scheduler.start()
Expand Down
9 changes: 4 additions & 5 deletions src/cpe.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from requests import get, ConnectTimeout, ConnectionError
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase, AsyncIOMotorCollection
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorCollection
from time import sleep
from pymongo import InsertOne

Expand All @@ -8,22 +8,21 @@


async def clone_cpes(client: AsyncIOMotorClient, delay: float, headers: dict[str, str]):
nvd_clone_db: AsyncIOMotorDatabase = client.nvd
cpes_collection: AsyncIOMotorCollection = nvd_clone_db.get_collection('cpes')
cpes_collection: AsyncIOMotorCollection = client.nvd.get_collection('cpes')
await cpes_collection.create_index('cpeNameId', unique=True)
index: int = 0
while True:
actions: list[InsertOne] = []
while True:
try:
response = get(cpe_uri + str(index), headers=headers).json()
sleep(delay)
break
except (ConnectTimeout, ConnectionError):
sleep(6)
actions: list[InsertOne] = []
for product in response['products']:
actions.append(InsertOne(product['cpe']))
index += response['resultsPerPage']
await cpes_collection.bulk_write(actions, ordered=False)
index += response['resultsPerPage']
if index == response['totalResults']:
break
9 changes: 4 additions & 5 deletions src/cpe_match.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from requests import get, ConnectTimeout, ConnectionError
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase, AsyncIOMotorCollection
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorCollection
from time import sleep
from pymongo import InsertOne

Expand All @@ -8,22 +8,21 @@


async def clone_cpe_matchs(client: AsyncIOMotorClient, delay: float, headers: dict[str, str]):
nvd_clone_db: AsyncIOMotorDatabase = client.nvd
cpe_match_collection: AsyncIOMotorCollection = nvd_clone_db.get_collection('cpe_matchs')
cpe_match_collection: AsyncIOMotorCollection = client.nvd.get_collection('cpe_matchs')
await cpe_match_collection.create_index('matchCriteriaId', unique=True)
index: int = 0
while True:
actions: list[InsertOne] = []
while True:
try:
response = get(cpe_match_uri + str(index), headers=headers).json()
sleep(delay)
break
except (ConnectTimeout, ConnectionError):
sleep(6)
actions: list[InsertOne] = []
for match_string in response['matchStrings']:
actions.append(InsertOne(match_string['matchString']))
index += response['resultsPerPage']
await cpe_match_collection.bulk_write(actions, ordered=False)
index += response['resultsPerPage']
if index == response['totalResults']:
break
9 changes: 4 additions & 5 deletions src/cve.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from requests import get, ConnectTimeout, ConnectionError
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase, AsyncIOMotorCollection
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorCollection
from time import sleep
from pymongo import InsertOne

Expand All @@ -8,22 +8,21 @@


async def clone_cves(client: AsyncIOMotorClient, delay: float, headers: dict[str, str]):
nvd_clone_db: AsyncIOMotorDatabase = client.nvd
cves_collection: AsyncIOMotorCollection = nvd_clone_db.get_collection('cves')
cves_collection: AsyncIOMotorCollection = client.nvd.get_collection('cves')
await cves_collection.create_index('id', unique=True)
index: int = 0
while True:
actions: list[InsertOne] = []
while True:
try:
response = get(cve_uri + str(index), headers=headers).json()
sleep(delay)
break
except (ConnectTimeout, ConnectionError):
sleep(6)
actions: list[InsertOne] = []
for vulnerability in response['vulnerabilities']:
actions.append(InsertOne(vulnerability['cve']))
index += response['resultsPerPage']
await cves_collection.bulk_write(actions, ordered=False)
index += response['resultsPerPage']
if index == response['totalResults']:
break

0 comments on commit 5b91874

Please sign in to comment.