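// ---- Launcher script (original filename not given); in --m mode it opens
// one 'node index.js' worker per cluster ----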

import { exec } from 'child_process';
import moment from 'moment';
import Const from './Scrapper/settings/const.js';
if (process.argv[2] === '--m') {
    const StartProgram = () => {
        // Divide the scrape into clusters; each spawned worker handles one
        // cluster of Const.perClusterItems events.
        const parameters = [0, 1];
        // Launch index.js in a separate command prompt window per cluster
        // ('start cmd.exe /c' is Windows-only).
        parameters.forEach(parameter => {
            exec(`start cmd.exe /c node index.js ${process.argv[2]} ${parameter}`, (error, stdout, stderr) => {
                if (error) {
                    console.error(`An error occurred while executing index.js with parameter ${parameter}:`, error);
                }
            });
        });
    };

    console.log(moment().format("YYYY-MM-DD HH:mm:ss"));
    StartProgram();

    // Re-launch the worker windows on every scrape interval.
    setInterval(() => {
        console.log(moment().format("YYYY-MM-DD HH:mm:ss"));
        StartProgram();
    }, Const.scrapperTime);
}
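
// ---- index.js: the worker script spawned above (file boundary inferred from
// the 'node index.js' command and the imports that follow) ----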

import moment from 'moment';

import { fetchAgents, fetchProxies, fetchScrapping, fetchScrappingv2 } from './Scrapper/API.js';
import { ScrapeEvent } from './Scrapper/helpers/scrapper.js';
import proxy from './Scrapper/settings/proxy.js';
import agent from './Scrapper/settings/userAgents.js';
import failedProxies from './Scrapper/settings/failedProxy.js';
import Const from './Scrapper/settings/const.js';

// --s : single mode, scrape events one at a time
// --m : multiple mode, split the events across clusters
// (no flag) : normal mode, limit 260
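
// Example invocations (a sketch; assumes this file is index.js, which is the
// worker the launcher above spawns):
//   node index.js --s       -> single mode
//   node index.js --m 0     -> multiple mode, cluster 0
//   node index.js           -> normal mode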

let ClusterReceivedFromScrapper;

if (process.argv[2] && process.argv[2] === '--s') {
    Const.scrapeType = 's';
}
else if (process.argv[2] && process.argv[2] === '--m') {
    Const.scrapeType = 'm';
    if (process.argv[3]) {
        ClusterReceivedFromScrapper = parseInt(process.argv[3], 10);
    }
}
else {
    Const.scrapeType = 'n';
}

try {
    // Retry scraping a single event until ScrapeEvent reports success.
    const ScrapeLoop = async (event) => {
        let isResolved = false;
        while (!isResolved) {
            isResolved = (await ScrapeEvent(event)) === true;
        }
        return true;
    };

    const StartProgram = async () => {
        // Getting proxies
        const { data: dataProxies, status: statusProxies } = await fetchProxies();
        if (statusProxies === 200) {
            proxy.proxies = dataProxies;
        }

        // Getting user agents
        const { data: dataUserAgents, status: statusAgents } = await fetchAgents();
        if (statusAgents === 200) {
            agent.UserAgent = dataUserAgents;
        }

        if (statusAgents === 200 && statusProxies === 200 && dataProxies.length > 0 && dataUserAgents.length > 0) {
            console.log("Event Listening Started");
            // Getting events: the v2 endpoint serves single/multiple mode, v1 the rest
            const { data, status: statusEvents } = (Const.scrapeType === "s" || Const.scrapeType === "m")
                ? await fetchScrappingv2(Const.scrapeType, Const.cluster)
                : await fetchScrapping();
            if (statusEvents === 200) {
                console.log("Total Events: " + data.length);
                let dataFinal = [...data];

                if (Const.scrapeType === 'm') {
                    // Take this worker's slice of the event list, e.g. with
                    // perClusterItems = 200: cluster 0 handles events [0, 200),
                    // cluster 1 handles events [200, 400).
                    let startIndex = ClusterReceivedFromScrapper * Const.perClusterItems;
                    let endIndex = startIndex + Const.perClusterItems;
                    dataFinal = dataFinal.slice(startIndex, endIndex);
                    console.log("Cluster slice size: " + dataFinal.length);

                    let e = 1;
                    for (let event of dataFinal) {
                        console.log(e);
                        const dataG = await ScrapeLoop(event);
                        console.log(dataG, "completed");
                        e += 1;
                        // Clear the failed-proxy list between events.
                        failedProxies.failedProxies = [];
                    }
                    console.log(moment().format("YYYY-MM-DD HH:mm:ss"), "END");
                }
            }
        }
    };

    console.log(moment().format("YYYY-MM-DD HH:mm:ss"));
    StartProgram();

    // Normal and multiple modes re-run on a fixed interval.
    if (Const.scrapeType === 'n' || Const.scrapeType === 'm') {
        setInterval(() => {
            console.log(moment().format("YYYY-MM-DD HH:mm:ss"));
            StartProgram();
        }, Const.scrapperTime);
    }
}
catch (e) {
    // Note: this only catches synchronous setup errors; rejections inside
    // the async StartProgram are not caught here.
    process.abort();
}
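
// ---- Scrapper/API.js (imported above as './Scrapper/API.js') ----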
import axios from 'axios';
import https from 'https';
import Const from './settings/const.js';

const BaseUrl = Const.url;

// Skip TLS certificate verification on every request made through this agent.
const httpsAgent = new https.Agent({
    rejectUnauthorized: false,
});

const axiosInstance = axios.create({
    baseURL: BaseUrl,
    httpsAgent
});
export const fetchScrapping = () => axiosInstance.get(BaseUrl + 'event/event-scrape');
export const fetchScrappingv2 = (type, cluster) => {
    return axiosInstance.get(BaseUrl + `event/event-scrape/v2?type=${type}&cluster=${cluster}`);
};
export const fetchAgents = () => axiosInstance.get(BaseUrl + 'header');
export const fetchProxies = () => axiosInstance.get(BaseUrl + 'proxy');
export const postEventLines = (data) => axiosInstance.post(BaseUrl + 'skyBox', data);
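
// Example usage of this module (a minimal sketch; the response shapes are
// assumptions based on how index.js consumes these endpoints):
//
//   import { fetchProxies, fetchScrappingv2 } from './Scrapper/API.js';
//
//   const { data: proxies, status } = await fetchProxies();
//   if (status === 200) console.log(`Loaded ${proxies.length} proxies`);
//
//   // Event list for cluster 0 in multiple mode:
//   const { data: events } = await fetchScrappingv2('m', 0);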
