Commit
1 parent af8d163 · commit 46e424d
Showing 1 changed file with 96 additions and 59 deletions.
@@ -1,73 +1,110 @@
 "use strict";
-const scraper = require("../peviitor_scraper.js");
-const uuid = require("uuid");
+const { Scraper, postApiPeViitor } = require("peviitor_jsscraper");
+const { getTownAndCounty } = require("../getTownAndCounty.js");

-const url = "https://nxp.wd3.myworkdayjobs.com/wday/cxs/nxp/careers/jobs";
+const generateJob = (job_title, job_link, city, county) => ({
+  job_title,
+  job_link,
+  country: "Romania",
+  city,
+  county,
+});

-const company = { company: "NPX" };
-let finalJobs = [];
+const getAditionalCity = async (url) => {
+  const scraper = new Scraper(url);
+  const res = await scraper.get_soup("JSON");

-const s = new scraper.ApiScraper(url);
-s.headers.headers["Content-Type"] = "application/json";
-s.headers.headers["Accept"] = "application/json";
+  const citys = res.jobPostingInfo.additionalLocations;
+  for (let i = 0; i < citys.length; i++) {
+    let city = citys[i];
+    if (city === "Bucharest") {
+      city = "Bucuresti";
+    }
+    const county = getTownAndCounty(city).county;

-let data = { appliedFacets: {}, limit: 20, offset: 0, searchText: "Romania" };
+    if (county) {
+      return county;
+    }
+  }
+};

-s.post(data).then((response) => {
-  let step = 20;
-  let totalJobs = response.total;

-  const range = scraper.range(0, totalJobs, step);
+const getJobs = async () => {
+  const url = "https://nxp.wd3.myworkdayjobs.com/wday/cxs/nxp/careers/jobs";
+  const scraper = new Scraper(url);
+  const additionalHeaders = {
+    "Content-Type": "application/json",
+    Accept: "application/json",
+  };
+  scraper.config.headers = { ...scraper.config.headers, ...additionalHeaders };
+  const limit = 20;
+  const data = {
+    appliedFacets: { Location_Country: ["f2e609fe92974a55a05fc1cdc2852122"] },
+    limit: 20,
+    offset: 0,
+    searchText: "",
+  };
+  let soup = await scraper.post(data);
+  const { total } = soup;
+  const numberOfPages = Math.floor(
+    total % limit === 0 ? total / limit : total / limit + 1
+  );
+  const jobs = [];
+  for (let i = 0; i < numberOfPages; i += 1) {
+    data.offset = i * limit;
+    soup = await scraper.post(data);
+    const { jobPostings } = soup;
+    jobPostings.forEach((jobPosting) => {
+      const { title, externalPath, locationsText } = jobPosting;
+      const job_link_prefix = "https://nxp.wd3.myworkdayjobs.com/en-US/careers";
+      const job_link = job_link_prefix + externalPath;
+      const separatorIndex = locationsText.indexOf(",");
+      let city = locationsText.substring(separatorIndex + 1);

-  const fetchData = () => {
-    return new Promise((resolve, reject) => {
-      for (let i = 0; i < range.length; i++) {
-        data["offset"] = range[i];
-        s.post(data).then((response) => {
-          let jobs = response.jobPostings;
-          jobs.forEach((job) => {
-            finalJobs.push(job);
-          });
-          if (finalJobs.length === totalJobs) {
-            resolve(finalJobs);
-          }
-        });
+      if (city === "Bucharest") {
+        city = "Bucuresti";
       }
-    });
-  };

-  let jobs = [];
+      let county = getTownAndCounty(city).county;

-  fetchData()
-    .then((finalJobs) => {
-      finalJobs.forEach((job) => {
-        const id = uuid.v4();
-        const job_title = job.title;
-        const job_link =
-          "https://nxp.wd3.myworkdayjobs.com/en-US/careers" + job.externalPath;
-        const city = job.locationsText.split(",")[0];
+      const isCounty = async () => {
+        if (county) {
+          return county;
+        } else {
+          const jobName = externalPath.split("/")[3];
+          const url = `https://nxp.wd3.myworkdayjobs.com/wday/cxs/nxp/careers/job/${jobName}`;
+          return await getAditionalCity(url);
+        }
+      };

-        jobs.push({
-          id: id,
-          job_title: job_title,
-          job_link: job_link,
-          company: company.company,
-          country: "Romania",
-          city: city,
-        });
+      isCounty().then((county) => {
+        const job = generateJob(title, job_link, city, county);
+        jobs.push(job);
       });
-    })
-    .then(() => {
-      console.log(JSON.stringify(jobs, null, 2));
+    });
+  }

-      scraper.postApiPeViitor(jobs, company);
+  return jobs;
+};

-      let logo = "https://nxp.wd3.myworkdayjobs.com/careers/assets/logo";
+const getParams = () => {
+  const company = "NPX";
+  const logo = "https://nxp.wd3.myworkdayjobs.com/careers/assets/logo";
+  const apikey = process.env.APIKEY;
+  const params = {
+    company,
+    logo,
+    apikey,
+  };
+  return params;
+};

-      let postLogo = new scraper.ApiScraper(
-        "https://api.peviitor.ro/v1/logo/add/"
-      );
-      postLogo.headers.headers["Content-Type"] = "application/json";
-      postLogo.post(JSON.stringify([{ id: company.company, logo: logo }]));
-    });
-});
+const run = async () => {
+  const jobs = await getJobs();
+  const params = getParams();
+  postApiPeViitor(jobs, params);
+};

+if (require.main === module) {
+  run();
+}

+module.exports = { run, getJobs, getParams }; // this is needed for our unit test job
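
Note on the pagination added in getJobs: the page count is a ceiling division written out by hand, since Math.floor(total % limit === 0 ? total / limit : total / limit + 1) equals Math.ceil(total / limit) for a non-negative integer total and a positive integer limit. A minimal standalone sketch of that arithmetic (the numberOfPages helper name and the sample totals are illustrative, not part of the commit; real totals come from the Workday jobs endpoint response):

"use strict";

// Mirrors the page-count expression committed in getJobs.
const numberOfPages = (total, limit) =>
  Math.floor(total % limit === 0 ? total / limit : total / limit + 1);

// Illustrative totals only.
[0, 1, 19, 20, 21, 41].forEach((total) => {
  const limit = 20;
  console.assert(numberOfPages(total, limit) === Math.ceil(total / limit));
  // Offsets the scraper would request for this total: 0, 20, 40, ...
  const offsets = Array.from(
    { length: numberOfPages(total, limit) },
    (_, i) => i * limit
  );
  console.log(total, offsets);
});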
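
Note on the new exports: module.exports together with the require.main === module guard means the file can be required by a test without kicking off a scrape, because run() only fires when the script is executed directly. A minimal smoke-test sketch, assuming the scraper is saved as ./nxp.js next to the test (a hypothetical path), APIKEY is set to a dummy value, and the peviitor_jsscraper and ../getTownAndCounty.js dependencies resolve:

"use strict";

const assert = require("assert");

process.env.APIKEY = "dummy-api-key"; // assumed test value; getParams() reads it at call time
const { getParams } = require("./nxp.js"); // hypothetical path to the committed scraper

// getParams() builds the payload metadata without touching the network.
const params = getParams();
assert.strictEqual(params.company, "NPX");
assert.strictEqual(params.logo, "https://nxp.wd3.myworkdayjobs.com/careers/assets/logo");
assert.strictEqual(params.apikey, "dummy-api-key");
console.log("getParams smoke test passed");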