plugin-throttling.js
Octokit plugin for GitHub’s recommended request throttling
Implements all recommended best practices to prevent hitting secondary rate limits.
Usage
Browsers: load @octokit/plugin-throttling and @octokit/core (or a core-compatible module) directly from esm.sh:
<script type="module">
import { Octokit } from "https://esm.sh/@octokit/core";
import { throttling } from "https://esm.sh/@octokit/plugin-throttling";
</script>
Node: install with npm install @octokit/core @octokit/plugin-throttling. Optionally replace @octokit/core with a core-compatible module.
Note: if you use this plugin with @octokit/rest v16, install @octokit/core as a devDependency. This is only temporary and will no longer be necessary with @octokit/rest v17.
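For @octokit/rest v16, a minimal sketch of the install commands following the note above:
npm install @octokit/rest @octokit/plugin-throttling
npm install --save-dev @octokit/core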
const { Octokit } = require("@octokit/core");
const { throttling } = require("@octokit/plugin-throttling");
The code below creates a "Hello, world!" issue on every repository in a given organization. Without the throttling plugin it would send many requests in parallel and hit rate limits very quickly. The @octokit/plugin-throttling plugin slows down your requests according to the official guidelines, so you don't get blocked before your quota is exhausted.
The throttle.onSecondaryRateLimit and throttle.onRateLimit options are required. Return true from these handlers to automatically retry the request after retryAfter seconds.
const MyOctokit = Octokit.plugin(throttling);
const octokit = new MyOctokit({
auth: `secret123`,
throttle: {
onRateLimit: (retryAfter, options, octokit, retryCount) => {
octokit.log.warn(
`Request quota exhausted for request ${options.method} ${options.url}`,
);
if (retryCount < 1) {
// only retries once
octokit.log.info(`Retrying after ${retryAfter} seconds!`);
return true;
}
},
onSecondaryRateLimit: (retryAfter, options, octokit) => {
// does not retry, only logs a warning
octokit.log.warn(
`SecondaryRateLimit detected for request ${options.method} ${options.url}`,
);
},
},
});
async function createIssueOnAllRepos(org) {
const repos = await octokit.paginate(
octokit.repos.listForOrg.endpoint({ org }),
);
return Promise.all(
repos.map(({ name }) =>
octokit.issues.create({
owner: org,
repo: name,
title: "Hello, world!",
}),
),
);
}
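For example, with a placeholder organization name:
createIssueOnAllRepos("my-org").catch((error) => octokit.log.error(error));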
Pass { throttle: { enabled: false } } to disable this plugin.
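A minimal sketch of a disabled instance (the auth token is a placeholder):
const octokit = new MyOctokit({
  auth: "secret123",
  throttle: { enabled: false },
});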
Clustering
Enabling clustering support ensures that your application will not exceed rate limits across Octokit instances and across Node.js processes.
First install either redis or ioredis:
# NodeRedis (https://github.com/NodeRedis/node_redis)
npm install --save redis
# or ioredis (https://github.com/luin/ioredis)
npm install --save ioredis
Then in your application:
const Bottleneck = require("@thellimist/bottleneck");
const Redis = require("redis");
const client = Redis.createClient({
/* options */
});
const connection = new Bottleneck.RedisConnection({ client });
connection.on("error", err => console.error(err));
const octokit = new MyOctokit({
auth: "secret123",
throttle: {
onSecondaryRateLimit: (retryAfter, options, octokit) => {
/* ... */
},
onRateLimit: (retryAfter, options, octokit) => {
/* ... */
},
// The Bottleneck connection object
connection,
// A "throttling ID". All octokit instances with the same ID
// using the same Redis server will share the throttling.
id: "my-super-app",
// Otherwise the plugin uses a lighter version of Bottleneck without Redis support
Bottleneck
}
});
// To close the connection and allow your application to exit cleanly:
await connection.disconnect();
To use the ioredis library instead:
const Redis = require("ioredis");
const client = new Redis({
/* options */
});
const connection = new Bottleneck.IORedisConnection({ client });
connection.on("error", (err) => console.error(err));
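From here, the connection is passed to the throttle options exactly as in the redis example above; a brief sketch (handlers elided, the id is just an example):
const octokit = new MyOctokit({
  auth: "secret123",
  throttle: {
    onRateLimit: (retryAfter, options, octokit) => {
      /* ... */
    },
    onSecondaryRateLimit: (retryAfter, options, octokit) => {
      /* ... */
    },
    connection,
    id: "my-super-app",
  },
});
// Close the connection for a clean shutdown:
await connection.disconnect();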