Scraping a web page using Node.js in a JavaScript extension

I’m creating a JavaScript extension for Firefox in which I want to use a scraper that passes a URL to a function in the extension. The scraper is built with Cheerio, and I’ve created and tested it using Node.js on my local machine, but I’m having trouble getting it to work in the extension. I suspect that code running inside a browser extension most likely can’t open a ‘request’ connection the way Node.js can.

When I call the scrape function it throws no error, but it also returns nothing at all (no console logs either, so the callback never fires). Does anyone know how I can call this function from an extension on any machine? Is there a way to include Node.js in the extension, or something like that? My code is below, along with a rough fetch-based sketch of what I’ve been considering instead.

// Node.js modules the scraper relies on; these are available under Node
// but not automatically inside a browser extension.
var request = require('request');
var cheerio = require('cheerio');

var url = '...';            // base URL of the site being scraped (placeholder)
var availableItems = [];    // hrefs collected from the listing page
var itemurl;                // URL of the matching item, if found

function scrape(item) {
    request(url, function (error, response, html) {
        if (!error && response.statusCode == 200) {
            console.log("connected");
            // Load the listing page into cheerio
            var $ = cheerio.load(html);

            // Collect the href of every article link that has exactly one child element
            $('.inner-article a').each(function (i, elem) {
                if ($(this).children().length == 1) {
                    availableItems.push($(this).attr('href'));
                }
            });

            // Request each item page and check its title against the search term.
            // Note: forEach passes the element first, so i is the href string here.
            availableItems.forEach(function (i) {
                request(url + i, function (error, response, html) {
                    if (!error && response.statusCode == 200) {
                        var $ = cheerio.load(html);
                        var title = $('#details').find('h1').text();
                        title = title.toLowerCase();
                        title = title.replace(/\s+/g, ''); // strip whitespace

                        if (title.includes(item)) {
                            console.log('found');
                            itemurl = url + i + '';
                            // Returning from inside this async callback does not
                            // return a value from scrape() itself.
                            return itemurl;
                        }
                    }
                });
            });
        }
    });
}
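
From what I’ve read, a WebExtension can’t use Node’s request module directly, so I’ve been thinking about rewriting the scraper with the browser’s own fetch and DOMParser instead of request and Cheerio. Below is a rough, unverified sketch of what I mean. The url variable and the '.inner-article a' / '#details h1' selectors are the same assumptions as in my Node code, and I assume the extension would also need host permissions for the target site in its manifest. Is this the right direction, or is there a way to keep the Node.js version?

// Rough sketch of a browser-native version of scrape(), using fetch and
// DOMParser instead of request and cheerio. url and the CSS selectors are
// assumptions carried over from the Node version above.
async function scrapeWithFetch(item) {
    // Fetch the listing page and parse it into a DOM document
    const listingHtml = await (await fetch(url)).text();
    const listingDoc = new DOMParser().parseFromString(listingHtml, 'text/html');

    // Collect hrefs from links that have exactly one child element
    const availableItems = [];
    listingDoc.querySelectorAll('.inner-article a').forEach(function (a) {
        if (a.children.length == 1) {
            availableItems.push(a.getAttribute('href'));
        }
    });

    // Check each item page's title against the search term
    for (const href of availableItems) {
        const itemHtml = await (await fetch(url + href)).text();
        const itemDoc = new DOMParser().parseFromString(itemHtml, 'text/html');

        const titleEl = itemDoc.querySelector('#details h1');
        const title = titleEl
            ? titleEl.textContent.toLowerCase().replace(/\s+/g, '')
            : '';

        if (title.includes(item)) {
            console.log('found');
            return url + href;   // resolves the returned promise with the item URL
        }
    }
    return null; // nothing matched
}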

Thank you very much!