
my-cognitive-bot

v1.0.1 • Published

A cognitive bot implementation using TensorFlow.js

Downloads: 10

My Cognitive Bot

My Cognitive Bot is an AI-powered chatbot with cognitive capabilities implemented using TensorFlow.js. This bot can learn and respond to user inputs intelligently.

Table of Contents

  • Installation
  • Usage
  • Project Structure
  • Training the Model
  • Author
  • License

Installation

To get started with My Cognitive Bot, you need to have Node.js installed on your system. Follow the steps below to set up the project:

  1. Clone the repository:

    git clone https://github.com/vrzaq/my-cognitive-bot.git
    cd my-cognitive-bot

  2. Install dependencies:

    npm install
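If you would rather use CognitiveBot inside an existing project instead of cloning the repository, the only runtime dependency that appears in the code below is TensorFlow.js, so installing it directly should be enough (this assumes the standard @tensorflow/tfjs package, which is what CognitiveBot.js imports):

npm install @tensorflow/tfjs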

Usage

To start the bot, run the following command:

npm start

The bot will start running and be ready for you to interact with.
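The repository's package.json is not reproduced here, but since index.js uses ES module syntax, the start script presumably just runs the entry point with Node. A hypothetical sketch, not the package's actual manifest:

{
  "name": "my-cognitive-bot",
  "version": "1.0.1",
  "type": "module",
  "scripts": {
    "start": "node index.js"
  }
}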

Project Structure

The project consists of the following files:

  • index.js: Entry point of the application.
  • CognitiveBot.js: Contains the implementation of the CognitiveBot class.

index.js

import CognitiveBot from './CognitiveBot.js';

// Model hyperparameters: vocabulary size, embedding size, and hidden size
const vocabSize = 5000;
const embeddingSize = 128;
const hiddenSize = 128;

const bot = new CognitiveBot(vocabSize, embeddingSize, hiddenSize);

// Example training data. Note that train() and respond() operate on int32
// token-ID tensors, so in practice these strings have to be run through a
// tokenizer before being passed to the bot.
const inputTexts = ['hello', 'how are you'];
const targetTexts = ['hi', 'I am fine'];

async function trainBot() {
  await bot.train(inputTexts, targetTexts);
  console.log('Training completed.');
}

async function respondToInput(input) {
  const response = await bot.respond(input);
  console.log('Bot response:', response);
}

// Example interaction: wait for training to finish before asking for a response.
trainBot().then(() => respondToInput('hello'));

CognitiveBot.js

import * as tf from '@tensorflow/tfjs';

class CognitiveBot {
  constructor(vocabSize, embeddingSize, hiddenSize) {
    this.vocabSize = vocabSize;
    this.embeddingSize = embeddingSize;
    this.hiddenSize = hiddenSize;

    // Shared embedding for encoder and decoder inputs, a seq2seq LSTM pair,
    // and a softmax projection over the vocabulary.
    this.embedding = tf.layers.embedding({ inputDim: vocabSize, outputDim: embeddingSize });
    this.encoder = tf.layers.lstm({ units: hiddenSize, returnState: true });
    this.decoder = tf.layers.lstm({ units: hiddenSize, returnSequences: true, returnState: true });
    this.output = tf.layers.dense({ units: vocabSize, activation: 'softmax' });

    this.optimizer = tf.train.adam();
    // TensorFlow.js has no sparse categorical cross-entropy loss, so one-hot
    // encode the target token IDs and use categorical cross-entropy instead.
    this.lossFunction = (targetTokens, predictions) => {
      const labels = tf.oneHot(targetTokens.flatten().cast('int32'), vocabSize);
      const probs = predictions.reshape([-1, vocabSize]);
      return tf.metrics.categoricalCrossentropy(labels, probs).mean();
    };

    this.conversationHistory = [];
  }

  encode(inputTokens) {
    // inputTokens: int32 tensor of token IDs with shape [batch, timesteps].
    const embeddedInput = this.embedding.apply(inputTokens);
    const [, stateH, stateC] = this.encoder.apply(embeddedInput);
    return [stateH, stateC];
  }

  decode(stateH, stateC, targetTokens) {
    // Teacher forcing: feed the embedded target sequence to the decoder,
    // initialised with the encoder's final state.
    const embeddedTarget = this.embedding.apply(targetTokens);
    const [decoderOutputs] = this.decoder.apply(embeddedTarget, { initialState: [stateH, stateC] });
    return this.output.apply(decoderOutputs);
  }

  async train(inputTokens, targetTokens) {
    // inputTokens / targetTokens: arrays of int32 token-ID tensors, each of shape [1, timesteps].
    // Run one forward pass up front so the layers build their weights before
    // the optimizer collects the trainable variables.
    tf.tidy(() => {
      const [stateH, stateC] = this.encode(inputTokens[0]);
      this.decode(stateH, stateC, targetTokens[0]);
    });

    for (let i = 0; i < inputTokens.length; i++) {
      // minimize() computes gradients of the returned scalar loss with respect
      // to the trainable weights and applies one Adam update step.
      const loss = this.optimizer.minimize(() => {
        const [stateH, stateC] = this.encode(inputTokens[i]);
        const output = this.decode(stateH, stateC, targetTokens[i]);
        return this.lossFunction(targetTokens[i], output);
      }, true);

      console.log(`Example ${i + 1} loss: ${loss.dataSync()[0]}`);
      loss.dispose();
    }
  }

  async respond(inputTokens, maxLength = 20) {
    let [stateH, stateC] = this.encode(inputTokens);
    const startToken = tf.tensor2d([this.vocabSize - 1], [1, 1], 'int32'); // Start token
    const outputTokens = [startToken];

    // Greedy decoding: feed the previously generated token back into the
    // decoder until the end token appears or maxLength is reached.
    for (let step = 0; step < maxLength; step++) {
      const embedded = this.embedding.apply(outputTokens[outputTokens.length - 1]);
      const [decoderOutput, newStateH, newStateC] =
        this.decoder.apply(embedded, { initialState: [stateH, stateC] });
      const predictedToken = tf.argMax(this.output.apply(decoderOutput), -1);
      outputTokens.push(predictedToken);

      if (predictedToken.dataSync()[0] === this.vocabSize - 2) { // End token
        break;
      }

      [stateH, stateC] = [newStateH, newStateC];
    }

    this.conversationHistory.push([inputTokens, outputTokens]);
    // Drop the start token and the final (end) token, then map IDs back to text.
    return outputTokens.slice(1, -1).map((token) => this.vocabToText(token.dataSync()[0]));
  }

  vocabToText(token) {
    // Implement this function to convert token IDs back to text
  }
}

export default CognitiveBot;
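The class leaves vocabToText unimplemented, and the embedding layer works on integer token IDs rather than raw strings, so some form of tokenization has to sit in front of the bot. Below is a minimal word-level sketch that reserves the same IDs the class uses for its start and end tokens; the file name vocabulary.js and the buildVocabulary, textToTokens and tokenToText helpers are illustrative and not part of the published package.

// vocabulary.js (illustrative sketch, not shipped with the package)
import * as tf from '@tensorflow/tfjs';

// Build word <-> ID maps, keeping the two highest IDs free:
// vocabSize - 1 is CognitiveBot's start token, vocabSize - 2 its end token.
export function buildVocabulary(texts, vocabSize) {
  const wordToId = new Map();
  const idToWord = new Map();
  for (const text of texts) {
    for (const word of text.toLowerCase().split(/\s+/)) {
      if (!wordToId.has(word) && wordToId.size < vocabSize - 2) {
        const id = wordToId.size;
        wordToId.set(word, id);
        idToWord.set(id, word);
      }
    }
  }
  return { wordToId, idToWord };
}

// Convert a sentence into an int32 tensor of shape [1, timesteps].
export function textToTokens(text, wordToId) {
  const ids = text.toLowerCase().split(/\s+/).map((word) => wordToId.get(word) ?? 0);
  return tf.tensor2d([ids], [1, ids.length], 'int32');
}

// One way to implement vocabToText: look the ID up in the reverse map.
export function tokenToText(id, idToWord) {
  return idToWord.get(id) ?? '<unk>';
}

With a mapping like this in place, vocabToText(token) can simply delegate to tokenToText(token, idToWord).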

Training the Model

The train method of the CognitiveBot class trains the bot on pairs of input and target sequences. Replace the inputTexts and targetTexts arrays with your own training data and call train to start training; note that the model itself consumes token-ID tensors, so the raw strings need to be tokenized first.

const inputTexts = ['hello', 'how are you'];
const targetTexts = ['hi', 'I am fine'];

async function trainBot() {
  await bot.train(inputTexts, targetTexts);
  console.log('Training completed.');
}

trainBot();
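Because train consumes token-ID tensors, strings like the ones above have to be converted first. Using the hypothetical helpers sketched after CognitiveBot.js, that could look like this:

// Illustrative only: reuses bot, vocabSize, inputTexts and targetTexts from
// index.js plus the hypothetical helpers from vocabulary.js.
import { buildVocabulary, textToTokens } from './vocabulary.js';

async function trainBotWithTokens() {
  const { wordToId } = buildVocabulary([...inputTexts, ...targetTexts], vocabSize);
  const inputTensors = inputTexts.map((text) => textToTokens(text, wordToId));
  const targetTensors = targetTexts.map((text) => textToTokens(text, wordToId));

  await bot.train(inputTensors, targetTensors);
  console.log('Training completed.');
}

trainBotWithTokens();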

Author

This project is developed by Arifi Razzaq.

License

This project is licensed under the MIT License.

