diff --git a/packages/kbot/cat_gen_0.png b/packages/kbot/cat_gen_0.png
deleted file mode 100644
index f8701a11..00000000
Binary files a/packages/kbot/cat_gen_0.png and /dev/null differ
diff --git a/packages/kbot/dist-in/commands/images.js b/packages/kbot/dist-in/commands/images.js
index 872dc29d..96daedef 100644
--- a/packages/kbot/dist-in/commands/images.js
+++ b/packages/kbot/dist-in/commands/images.js
@@ -484,4 +484,4 @@ export const imageCommand = async (argv) => {
logger.error('Failed to parse options or generate image:', error.message, error.issues, error.stack);
}
};
-//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"images.js","sourceRoot":"","sources":["../../src/commands/images.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,KAAK,IAAI,MAAM,WAAW,CAAC;AAClC,OAAO,EAAE,IAAI,IAAI,KAAK,EAAE,MAAM,oBAAoB,CAAC;AACnD,OAAO,EAAE,IAAI,IAAI,MAAM,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EACH,YAAY,EACZ,QAAQ,EACR,UAAU,EACb,MAAM,SAAS,CAAC;AACjB,OAAO,EAAW,MAAM,EAAE,MAAM,OAAO,CAAC;AACxC,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAC5C,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAE5C,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAC;AAE9D,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AACjE,OAAO,EAAE,MAAM,IAAI,aAAa,EAAE,MAAM,cAAc,CAAC;AACvD,OAAO,EAAE,KAAK,EAAE,MAAM,oBAAoB,CAAC;AAC3C,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C,SAAS,sBAAsB,CAAC,GAAuB,EAAE,QAAkB;IACvE,IAAI,MAAc,CAAC;IAEnB,IAAI,GAAG,EAAE,CAAC;QACN,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QACtC,MAAM,OAAO,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACnE,IAAI,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;YACnC,MAAM,GAAG,WAAW,CAAC;QACzB,CAAC;aAAM,CAAC;YACJ,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;QACvC,CAAC;IACL,CAAC;SAAM,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC7B,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;IACvC,CAAC;SAAM,CAAC;QACJ,MAAM,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,kCAAkC;IAC9D,CAAC;IAED,IAAI,YAAY,CAAC;IACjB,IAAI,CAAC,GAAG,CAAC,CAAC;IAEV,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACtB,MAAM,gBAAgB,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QAC/E,MAAM,KAAK,GAAG,gBAAgB,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;QACpD,IAAI,KAAK,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;YACvB,YAAY,GAAG,gBAAgB,CAAC,SAAS,CAAC,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;YAC1D,CAAC,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC;QACnC,CAAC;aAAM,CAAC;YACJ,YAAY,GAAG,gBAAgB,CAAC;QACpC,CAAC;IACL,CAAC;SAAM,CAAC;QACJ,YAAY,GAAG,WAAW,CAAC;IAC/B,CAAC;IAED,IAAI,WAAW,CAAC;IAChB,IAAI,YAAY,CAAC;IACjB,GAAG,CAAC;QACA,WAAW,GAAG,GAAG,YAAY,QAAQ,CAAC,MAAM,CAAC;QAC7C,YAAY,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;QACjD,CAAC,EAAE,CAAC;IACR,CAAC,QAAQ,MAAM,CAAC,YAAY,CAAC,EAAE;IAE/B,OAAO,YAAY,CAAC;AACxB,CAAC;AAED,SAAS,aAAa;IAElB,sEAAsE;IACtE,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC;IAClE,oFAAoF;IACpF,MAAM,cAAc,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAC5E,CAAC,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC;QACxB,CAAC,CAAC,SAAS,CAAC;IAEZ,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAEjE,+DAA+D;IAC/D,IAAI,WAAmB,CAAC;IACxB,IAAI,cAAsB,CAAC;IAE3B,QAAQ,OAAO,CAAC,QAAQ,EAAE,CAAC;QACvB,KAAK,OAAO;YACR,WAAW,GAAG,QAAQ,CAAC;YACvB,cAAc,GAAG,eAAe,CAAC;YACjC,MAAM;QACV,KAAK,QAAQ;YACT,WAAW,GAAG,QAAQ,CAAC;YACvB,cAAc,GAAG,WAAW,CAAC;YAC7B,MAAM;QACV,KAAK,OAAO;YACR,WAAW,GAAG,UAAU,CAAC;YACzB,cAAc,GAAG,WAAW,CAAC;YAC7B,MAAM;QACV;YACI,MAAM,IAAI,KAAK,CAAC,yBAAyB,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACrE,CAAC;IAED,OAAO,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,cAAc,CAAC,CAAC;AACvE,CAAC;AAED,MAAM,CAAC,MAAM,kBAAkB,GAAG,GAAG,EAAE;IACnC,MAAM,UAAU,GAAG,aAAa,EAAE,CAAC,IAAI,CAAC;QACpC,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,IAAI;QACX,QAAQ,EAAE,IAAI;QACd,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;QACb,GAAG,EAAE,IAAI;KACZ,CAAC,CAAC;IAEH,OAAO,UAAU,CAAC,MAAM,CAAC;QACrB,GAAG,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,0BAA0B,CAAC;QAChE,KAAK,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC
,gCAAgC,CAAC,CAAC,QAAQ,CAAC,+CAA+C,CAAC;QACrH,GAAG,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,kDAAkD,CAAC;QAC5E,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,2CAA2C,CAAC;KACtF,CAAC,CAAC;AACP,CAAC,CAAA;AAED,KAAK,UAAU,qBAAqB,CAAC,IAAS;IAC1C,MAAM,MAAM,GAAG,IAAI,MAAM,CAAU;QAC/B,QAAQ,EAAE,CAAC,EAAE,8BAA8B;QAC3C,iBAAiB,EAAE,wEAAwE;KAC9F,CAAC,CAAC;IAEH,OAAO,IAAI,OAAO,CAAC,CAAC,QAAQ,EAAE,MAAM,EAAE,EAAE;QACpC,MAAM,UAAU,GAAG,aAAa,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,kBAAkB,EAAE,UAAU,CAAC,CAAC;QAC5C,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC;YACtB,OAAO,MAAM,CAAC,IAAI,KAAK,CAAC,iCAAiC,UAAU,8EAA8E,CAAC,CAAC,CAAC;QACxJ,CAAC;QAED,wBAAwB;QACxB,MAAM,IAAI,GAAa,EAAE,CAAC;QAE1B,oBAAoB;QACpB,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACf,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YAC7E,MAAM,gBAAgB,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5D,IAAI,CAAC,IAAI,CAAC,GAAG,gBAAgB,CAAC,CAAC;QACnC,CAAC;QAED,cAAc;QACd,MAAM,MAAM,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;QAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,IAAI,MAAM,EAAE,MAAM,EAAE,GAAG,CAAC;QACnD,IAAI,MAAM,EAAE,CAAC;YACT,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;QACnC,CAAC;QAED,UAAU;QACV,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;YACX,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC;QACjC,CAAC;QAED,aAAa;QACb,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YACd,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QACvC,CAAC;QAED,MAAM,YAAY,GAAG,KAAK,CAAC,UAAU,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC;QAElF,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,IAAI,WAAW,GAAG,EAAE,CAAC;QAErB,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YAC1C,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;YAE9B,yCAAyC;YACzC,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YAC5D,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACvB,IAAI,CAAC;oBACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;oBACjC,IAAI,OAAO,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;wBACpC,MAAM,CAAC,IAAI,CAAC,qCAAqC,CAAC,CAAC;wBAEnD,2CAA2C;wBAC3C,MAAM,MAAM,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;wBAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,IAAI,MAAM,EAAE,MAAM,EAAE,GAAG,CAAC;wBACnD,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;wBACnG,MAAM,gBAAgB,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;wBAE5D,MAAM,cAAc,GAAG;4BACnB,GAAG,EAAE,4BAA4B;4BACjC,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,IAAI;4BAC3B,GAAG,EAAE,IAAI,CAAC,GAAG,IAAI,IAAI;4BACrB,MAAM,EAAE,MAAM,IAAI,IAAI;4BACtB,KAAK,EAAE,gBAAgB;yBAC1B,CAAC;wBAEF,MAAM,UAAU,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;wBAClD,MAAM,CAAC,IAAI,CAAC,6BAA6B,EAAE,UAAU,CAAC,CAAC;wBACvD,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,UAAU,GAAG,IAAI,CAAC,CAAC;wBAC7C,MAAM,CAAC,IAAI,CAAC,gCAAgC,EAAE,cAAc,CAAC,CAAC;wBAE9D,kBAAkB;wBAClB,KAAK,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;4BACvC,IAAI,CAAC;gCACD,IAAI,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;oCACpB,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;oCAC5C,MAAM,MAAM,GAAG,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;oCAC9C,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,WAAW,EAAE,KAAK,MAAM,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,YAAY,CAAC;oCAC/F,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;oCAE1C,MAAM,aAAa,GAAG;wCAClB,GAAG,EAAE,2BAA2B;wCAChC,MAAM;wCACN,QAAQ;wCACR,QAAQ,EAAE,SAAS;qCACtB,CAAC;oCAEF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;oCAChE,MAAM,CAAC,IAAI
,CAAC,uBAAuB,QAAQ,KAAK,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,GAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gCACzF,CAAC;4BACL,CAAC;4BAAC,OAAO,KAAK,EAAE,CAAC;gCACb,MAAM,CAAC,KAAK,CAAC,yBAAyB,SAAS,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;4BACtE,CAAC;wBACL,CAAC;oBACL,CAAC;yBAAM,IAAI,OAAO,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;wBAC3C,MAAM,CAAC,IAAI,CAAC,qCAAqC,CAAC,CAAC;wBACnD,MAAM,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC;wBAClC,IAAI,YAAY,IAAI,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC;4BACzC,IAAI,CAAC;gCACD,IAAI,MAAM,CAAC,YAAY,CAAC,EAAE,CAAC;oCACvB,UAAU,CAAC,YAAY,CAAC,CAAC;oCACzB,MAAM,CAAC,IAAI,CAAC,gCAAgC,YAAY,EAAE,CAAC,CAAC;oCAC5D,MAAM,eAAe,GAAG;wCACpB,GAAG,EAAE,2BAA2B;wCAChC,IAAI,EAAE,YAAY;qCACrB,CAAC;oCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC,CAAC;gCACtE,CAAC;qCAAM,CAAC;oCACJ,MAAM,CAAC,IAAI,CAAC,mCAAmC,YAAY,EAAE,CAAC,CAAC;oCAC/D,MAAM,aAAa,GAAG;wCAClB,GAAG,EAAE,qBAAqB;wCAC1B,IAAI,EAAE,YAAY;wCAClB,KAAK,EAAE,2BAA2B;qCACrC,CAAC;oCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;gCACpE,CAAC;4BACL,CAAC;4BAAC,OAAO,KAAK,EAAE,CAAC;gCACb,MAAM,CAAC,KAAK,CAAC,4BAA4B,YAAY,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;gCACxE,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,qBAAqB;oCAC1B,IAAI,EAAE,YAAY;oCAClB,KAAK,EAAE,KAAK,CAAC,OAAO;iCACvB,CAAC;gCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;4BACpE,CAAC;wBACL,CAAC;6BAAM,CAAC;4BACJ,MAAM,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;wBACtE,CAAC;oBACL,CAAC;yBAAM,IAAI,OAAO,CAAC,IAAI,KAAK,kBAAkB,EAAE,CAAC;wBAC7C,MAAM,CAAC,IAAI,CAAC,yCAAyC,CAAC,CAAC;wBAEvD,yDAAyD;wBACzD,MAAM,SAAS,GAAG,OAAO,CAAC,MAAM,CAAC;wBACjC,MAAM,QAAQ,GAAG,OAAO,CAAC,KAAK,IAAI,EAAE,CAAC;wBACrC,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC;wBAE3B,2EAA2E;wBAC3E,IAAI,CAAC;4BAED,MAAM,YAAY,GAAG,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;4BAC9D,MAAM,CAAC,IAAI,CAAC,uDAAuD,YAAY,EAAE,CAAC,CAAC;4BAEnF,MAAM,CAAC,IAAI,CAAC,kCAAkC,SAAS,GAAG,CAAC,CAAC;4BAE5D,IAAI,WAAW,GAAkB,IAAI,CAAC;4BAEtC,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gCACtB,gBAAgB;gCAChB,MAAM,CAAC,IAAI,CAAC,qBAAqB,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,SAAS,GAAG,CAAC,CAAC;gCACrF,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC;oCAC7C,GAAG,IAAI;oCACP,MAAM,EAAE,SAAS;oCACjB,OAAO,EAAE,QAAQ;oCACjB,GAAG,EAAE,YAAY,CAAC,mBAAmB;iCACxC,CAAC,CAAC;gCACH,WAAW,GAAG,MAAM,SAAS,CAAC,SAAS,EAAE,QAAQ,EAAE,aAAa,CAAC,CAAC;4BACtE,CAAC;iCAAM,CAAC;gCACJ,iBAAiB;gCACjB,MAAM,CAAC,IAAI,CAAC,gCAAgC,SAAS,GAAG,CAAC,CAAC;gCAC1D,MAAM,YAAY,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC;gCACjC,OAAO,YAAY,CAAC,OAAO,CAAC;gCAC5B,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC;oCAC7C,GAAG,YAAY;oCACf,MAAM,EAAE,SAAS;oCACjB,GAAG,EAAE,YAAY,CAAC,mBAAmB;iCACxC,CAAC,CAAC;gCACH,WAAW,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,aAAa,CAAC,CAAC;4BAC9D,CAAC;4BAED,IAAI,WAAW,EAAE,CAAC;gCACd,KAAK,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;gCACjC,MAAM,CAAC,IAAI,CAAC,qBAAqB,YAAY,EAAE,CAAC,CAAC;gCAEjD,uDAAuD;gCACvD,MAAM,YAAY,GAAG,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;gCAEpD,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,2BAA2B;oCAChC,MAAM,EAAE,YAAY;oCACpB,QAAQ,EAAE,WAAW;oCACrB,QAAQ,EAAE,YAAY;iCACzB,CAAC;gCAEF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;gCAChE,MAAM,CAAC,IAAI,CAAC,kCAAkC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC;4BACjF,CAAC;iCAAM,CAAC;gCACJ,MAAM,CAAC,KAAK,CAAC,4BAA4B,CAAC,CAAC;gCAE3C,yBAAyB;gCACzB,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,kBAAkB;oCACvB,KAAK,EAAE,0BAA0B;iCACpC,CAAC;gCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;4BACpE,CAAC;wBACL,CAAC;wBAAC,OAAO,KAAK,EAAE,CAAC;4BACb,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;4BAC5E,MAAM,UAAU,GAA
G,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;4BAEpE,OAAO,CAAC,GAAG,CAAC,sBAAsB,EAAE,KAAK,EAAC,YAAY,CAAC,CAAC;4BAExD,MAAM,CAAC,KAAK,CAAC,qBAAqB,EAAE;gCAChC,OAAO,EAAE,YAAY;gCACrB,KAAK,EAAE,UAAU;gCACjB,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gCAC5C,SAAS,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;gCAChC,KAAK,EAAE,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;6BAC9C,CAAC,CAAC;4BAEH,kCAAkC;4BAClC,MAAM,aAAa,GAAG;gCAClB,GAAG,EAAE,kBAAkB;gCACvB,KAAK,EAAE,YAAY;gCACnB,OAAO,EAAE;oCACL,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;oCAC5C,SAAS,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;oCAChC,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;iCACtC;6BACJ,CAAC;4BACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;wBACpE,CAAC;oBACL,CAAC;gBACL,CAAC;gBAAC,OAAO,CAAC,EAAE,CAAC;oBACT,4CAA4C;oBAC5C,MAAM,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;oBACvD,MAAM,IAAI,IAAI,GAAG,IAAI,CAAC;gBAC1B,CAAC;YACL,CAAC;QACL,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;YACpC,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YAE5D,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACvB,IAAI,CAAC;oBACD,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;oBACpC,IAAI,UAAU,CAAC,KAAK,IAAI,UAAU,CAAC,OAAO,EAAE,CAAC;wBACzC,qCAAqC;wBAErC,gCAAgC;wBAChC,IAAI,UAAU,CAAC,OAAO,KAAK,mCAAmC;4BAC1D,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,sCAAsC,CAAC,EAAE,CAAC;4BACtE,OAAO,CAAC,wBAAwB;wBACpC,CAAC;wBAED,wCAAwC;wBACxC,IAAI,UAAU,CAAC,OAAO,KAAK,wBAAwB,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC;4BAC9E,IAAI,CAAC;gCACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gCACpD,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;oCACd,MAAM,CAAC,IAAI,CAAC,gBAAgB,OAAO,CAAC,GAAG,EAAE,EAAE;wCACvC,MAAM,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,SAAS;wCACrH,GAAG,EAAE,OAAO,CAAC,GAAG;wCAChB,KAAK,EAAE,OAAO,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC,MAAM,QAAQ,CAAC,CAAC,CAAC,SAAS;wCAC1E,SAAS,EAAE,CAAC,CAAC,OAAO,CAAC,MAAM;qCAC9B,CAAC,CAAC;oCACH,OAAO;gCACX,CAAC;4BACL,CAAC;4BAAC,OAAO,CAAC,EAAE,CAAC;gCACT,kCAAkC;4BACtC,CAAC;wBACL,CAAC;wBAED,QAAQ,UAAU,CAAC,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;4BACrC,KAAK,OAAO;gCACR,MAAM,CAAC,KAAK,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCAC1D,MAAM;4BACV,KAAK,MAAM;gCACP,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCACzD,MAAM;4BACV,KAAK,MAAM;gCACP,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCACzD,MAAM;4BACV,KAAK,OAAO;gCACR,MAAM,CAAC,KAAK,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCAC1D,MAAM;4BACV;gCACI,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;wBACjE,CAAC;oBACL,CAAC;yBAAM,CAAC;wBACJ,yDAAyD;wBACzD,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBAC5B,CAAC;gBACL,CAAC;gBAAC,OAAO,CAAC,EAAE,CAAC;oBACT,kEAAkE;oBAClE,IAAI,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC;wBAC9B,mCAAmC;wBACnC,IAAI,IAAI,CAAC,QAAQ,CAAC,kCAAkC,CAAC;4BACjD,IAAI,CAAC,QAAQ,CAAC,sCAAsC,CAAC,EAAE,CAAC;4BACxD,OAAO,CAAC,aAAa;wBACzB,CAAC;wBACD,wDAAwD;wBACxD,IAAI,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,sBAAsB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;4BACzG,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,mBAAmB,EAAE,EAAE,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;4BAClF,MAAM,C
AAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;wBACnC,CAAC;oBACL,CAAC;yBAAM,IAAI,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC;wBACrB,gCAAgC;wBAChC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBAC5B,CAAC;gBACL,CAAC;YACL,CAAC;YACD,WAAW,IAAI,KAAK,CAAC;QACzB,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAI,EAAE,EAAE;YAC9B,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,IAAI,CAAC,CAAC;YACnD,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;YACrD,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC,CAAC;YAE1D,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;gBACb,MAAM,aAAa,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;gBACpC,MAAM,CAAC,IAAI,CAAC,2BAA2B,EAAE,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC;gBACxE,QAAQ,CAAC,aAAa,IAAI,IAAI,CAAC,CAAC;YACpC,CAAC;iBAAM,CAAC;gBACJ,MAAM,CAAC,IAAI,KAAK,CAAC,8BAA8B,IAAI,aAAa,WAAW,EAAE,CAAC,CAAC,CAAC;YACpF,CAAC;QACL,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YAC7B,MAAM,CAAC,GAAG,CAAC,CAAC;QAChB,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;AACP,CAAC;AAGD,MAAM,CAAC,MAAM,YAAY,GAAG,KAAK,EAAE,IAAS,EAAE,EAAE;IAC5C,MAAM,MAAM,GAAG,IAAI,MAAM,CAAU,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,IAAI,CAAC,EAAE,CAAC,CAAC;IAErE,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;QACX,IAAI,CAAC;YACD,MAAM,SAAS,GAAG,MAAM,qBAAqB,CAAC,IAAI,CAAC,CAAC;YACpD,IAAI,SAAS,EAAE,CAAC;gBACZ,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;gBACtC,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;gBAC7B,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;oBAC5C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC;gBACjC,CAAC;gBACD,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;oBACd,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;gBAC3B,CAAC;YACL,CAAC;iBAAM,CAAC;gBACJ,MAAM,CAAC,IAAI,CAAC,wCAAwC,CAAC,CAAC;gBACtD,OAAO;YACX,CAAC;QACL,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,MAAM,CAAC,KAAK,CAAC,oBAAoB,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;YAClD,OAAO;QACX,CAAC;IACL,CAAC;IAED,IAAI,IAAI,CAAC,OAAO,IAAI,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAED,IAAI,CAAC;QACD,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QACvD,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,GAAG,IAAI,EAAE,GAAG,aAAa,CAAC;QAEhD,MAAM,aAAa,GAAG,MAAM,aAAa,CAAC,aAAa,CAAC,CAAC;QACzD,MAAM,MAAM,GAAG,aAAa,EAAE,OAAiB,IAAI,EAAE,CAAC;QAEtD,IAAI,CAAC,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;YACtB,MAAM,CAAC,KAAK,CAAC,yFAAyF,CAAC,CAAC;YACxG,OAAO;QACX,CAAC;QAED,IAAI,CAAC,GAAG,EAAE,CAAC;YACP,MAAM,CAAC,KAAK,CAAC,oDAAoD,CAAC,CAAC;YACnE,OAAO;QACX,CAAC;QAED,IAAI,WAAW,GAAkB,IAAI,CAAC;QAEtC,IAAI,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACpD,gBAAgB;YAChB,KAAK,MAAM,SAAS,IAAI,OAAO,EAAE,CAAC;gBAC9B,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;oBACrB,MAAM,CAAC,KAAK,CAAC,6BAA6B,SAAS,EAAE,CAAC,CAAC;oBACvD,OAAO;gBACX,CAAC;YACL,CAAC;YACD,IAAI,CAAC,MAAM,EAAE,CAAC;gBACV,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;gBACxD,OAAO;YACX,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,qBAAqB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,MAAM,GAAG,CAAC,CAAC;YACjF,WAAW,GAAG,MAAM,SAAS,CAAC,MAAM,EAAE,OAAO,EAAE,aAAa,CAAC,CAAC;QAClE,CAAC;aAAM,IAAI,MAAM,EAAE,CAAC;YAChB,iBAAiB;YACjB,MAAM,CAAC,IAAI,CAAC,gCAAgC,MAAM,GAAG,CAAC,CAAC;YACvD,WAAW,GAAG,MAAM,WAAW,CAAC,MAAM,EAAE,aAAa,CAAC,CAAC;QAC3D,CAAC;QAED,IAAI,WAAW,EAAE,CAAC;YACd,MAAM,IAAI,GAAG,SAAS,CAAC,aAAa,CAAC,CAAC;YACtC,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC;YACpE,KAAK,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;YAC5B,MAAM,CAAC,IAAI,CAAC,mBAAmB,OAAO,EAAE,CAAC,CAAC;QAC9C,CAAC;aAAM,CAAC;YACJ,MAAM,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAC;QAC9C,CAAC;IAEL,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACb,MAAM,CAAC,KAAK,CAAC,4CAA4C,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,KAA
K,CAAC,KAAK,CAAC,CAAC;IACzG,CAAC;AACL,CAAC,CAAC"}
\ No newline at end of file
+//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"images.js","sourceRoot":"","sources":["../../src/commands/images.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,KAAK,IAAI,MAAM,WAAW,CAAC;AAClC,OAAO,EAAE,IAAI,IAAI,KAAK,EAAE,MAAM,oBAAoB,CAAC;AACnD,OAAO,EAAE,IAAI,IAAI,MAAM,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EACH,YAAY,EACZ,QAAQ,EACR,UAAU,EACb,MAAM,SAAS,CAAC;AACjB,OAAO,EAAW,MAAM,EAAE,MAAM,OAAO,CAAC;AACxC,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAC5C,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAE5C,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAC;AAE9D,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AACjE,OAAO,EAAE,MAAM,IAAI,aAAa,EAAE,MAAM,cAAc,CAAC;AACvD,OAAO,EAAE,KAAK,EAAE,MAAM,oBAAoB,CAAC;AAC3C,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C,SAAS,sBAAsB,CAAC,GAAuB,EAAE,QAAkB;IACvE,IAAI,MAAc,CAAC;IAEnB,IAAI,GAAG,EAAE,CAAC;QACN,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QACtC,MAAM,OAAO,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACnE,IAAI,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;YACnC,MAAM,GAAG,WAAW,CAAC;QACzB,CAAC;aAAM,CAAC;YACJ,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;QACvC,CAAC;IACL,CAAC;SAAM,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC7B,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;IACvC,CAAC;SAAM,CAAC;QACJ,MAAM,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,kCAAkC;IAC9D,CAAC;IAED,IAAI,YAAY,CAAC;IACjB,IAAI,CAAC,GAAG,CAAC,CAAC;IAEV,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACtB,MAAM,gBAAgB,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QAC/E,MAAM,KAAK,GAAG,gBAAgB,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;QACpD,IAAI,KAAK,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;YACvB,YAAY,GAAG,gBAAgB,CAAC,SAAS,CAAC,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;YAC1D,CAAC,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC;QACnC,CAAC;aAAM,CAAC;YACJ,YAAY,GAAG,gBAAgB,CAAC;QACpC,CAAC;IACL,CAAC;SAAM,CAAC;QACJ,YAAY,GAAG,WAAW,CAAC;IAC/B,CAAC;IAED,IAAI,WAAW,CAAC;IAChB,IAAI,YAAY,CAAC;IACjB,GAAG,CAAC;QACA,WAAW,GAAG,GAAG,YAAY,QAAQ,CAAC,MAAM,CAAC;QAC7C,YAAY,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;QACjD,CAAC,EAAE,CAAC;IACR,CAAC,QAAQ,MAAM,CAAC,YAAY,CAAC,EAAE;IAE/B,OAAO,YAAY,CAAC;AACxB,CAAC;AAED,SAAS,aAAa;IAElB,sEAAsE;IACtE,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC;IAClE,oFAAoF;IACpF,MAAM,cAAc,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAC5E,CAAC,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC;QACxB,CAAC,CAAC,SAAS,CAAC;IAEZ,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAEjE,+DAA+D;IAC/D,IAAI,WAAmB,CAAC;IACxB,IAAI,cAAsB,CAAC;IAE3B,QAAQ,OAAO,CAAC,QAAQ,EAAE,CAAC;QACvB,KAAK,OAAO;YACR,WAAW,GAAG,QAAQ,CAAC;YACvB,cAAc,GAAG,eAAe,CAAC;YACjC,MAAM;QACV,KAAK,QAAQ;YACT,WAAW,GAAG,QAAQ,CAAC;YACvB,cAAc,GAAG,WAAW,CAAC;YAC7B,MAAM;QACV,KAAK,OAAO;YACR,WAAW,GAAG,UAAU,CAAC;YACzB,cAAc,GAAG,WAAW,CAAC;YAC7B,MAAM;QACV;YACI,MAAM,IAAI,KAAK,CAAC,yBAAyB,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACrE,CAAC;IAED,OAAO,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,cAAc,CAAC,CAAC;AACvE,CAAC;AAED,MAAM,CAAC,MAAM,kBAAkB,GAAG,GAAG,EAAE;IACnC,MAAM,UAAU,GAAG,aAAa,EAAE,CAAC,IAAI,CAAC;QACpC,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,IAAI;QACX,QAAQ,EAAE,IAAI;QACd,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;QACb,GAAG,EAAE,IAAI;KACZ,CAAC,CAAC;IAEH,OAAO,UAAU,CAAC,MAAM,CAAC;QACrB,GAAG,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,0BAA0B,CAAC;QAChE,KAAK,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC
,gCAAgC,CAAC,CAAC,QAAQ,CAAC,+CAA+C,CAAC;QACrH,GAAG,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,kDAAkD,CAAC;QAC5E,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,2CAA2C,CAAC;KACtF,CAAC,CAAC;AACP,CAAC,CAAA;AAED,KAAK,UAAU,qBAAqB,CAAC,IAAS;IAC1C,MAAM,MAAM,GAAG,IAAI,MAAM,CAAU;QAC/B,QAAQ,EAAE,CAAC,EAAE,8BAA8B;QAC3C,iBAAiB,EAAE,wEAAwE;KAC9F,CAAC,CAAC;IAEH,OAAO,IAAI,OAAO,CAAC,CAAC,QAAQ,EAAE,MAAM,EAAE,EAAE;QACpC,MAAM,UAAU,GAAG,aAAa,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,kBAAkB,EAAE,UAAU,CAAC,CAAC;QAC5C,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC;YACtB,OAAO,MAAM,CAAC,IAAI,KAAK,CAAC,iCAAiC,UAAU,8EAA8E,CAAC,CAAC,CAAC;QACxJ,CAAC;QAED,wBAAwB;QACxB,MAAM,IAAI,GAAa,EAAE,CAAC;QAE1B,oBAAoB;QACpB,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACf,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YAC7E,MAAM,gBAAgB,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5D,IAAI,CAAC,IAAI,CAAC,GAAG,gBAAgB,CAAC,CAAC;QACnC,CAAC;QAED,cAAc;QACd,MAAM,MAAM,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;QAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,IAAI,MAAM,EAAE,MAAM,EAAE,GAAG,CAAC;QACnD,IAAI,MAAM,EAAE,CAAC;YACT,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;QACnC,CAAC;QAED,UAAU;QACV,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;YACX,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC;QACjC,CAAC;QAED,aAAa;QACb,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YACd,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QACvC,CAAC;QAED,MAAM,YAAY,GAAG,KAAK,CAAC,UAAU,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC;QAElF,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,IAAI,WAAW,GAAG,EAAE,CAAC;QAErB,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YAC1C,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;YAE9B,yCAAyC;YACzC,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YAC5D,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACvB,IAAI,CAAC;oBACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;oBACjC,IAAI,OAAO,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;wBACpC,MAAM,CAAC,IAAI,CAAC,qCAAqC,CAAC,CAAC;wBAEnD,2CAA2C;wBAC3C,MAAM,MAAM,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;wBAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,IAAI,MAAM,EAAE,MAAM,EAAE,GAAG,CAAC;wBACnD,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;wBACnG,MAAM,gBAAgB,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;wBAE5D,MAAM,cAAc,GAAG;4BACnB,GAAG,EAAE,4BAA4B;4BACjC,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,IAAI;4BAC3B,GAAG,EAAE,IAAI,CAAC,GAAG,IAAI,IAAI;4BACrB,MAAM,EAAE,MAAM,IAAI,IAAI;4BACtB,KAAK,EAAE,gBAAgB;yBAC1B,CAAC;wBAEF,MAAM,UAAU,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;wBAClD,MAAM,CAAC,IAAI,CAAC,6BAA6B,EAAE,UAAU,CAAC,CAAC;wBACvD,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,UAAU,GAAG,IAAI,CAAC,CAAC;wBAC7C,MAAM,CAAC,IAAI,CAAC,gCAAgC,EAAE,cAAc,CAAC,CAAC;wBAE9D,kBAAkB;wBAClB,KAAK,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;4BACvC,IAAI,CAAC;gCACD,IAAI,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;oCACpB,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;oCAC5C,MAAM,MAAM,GAAG,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;oCAC9C,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,WAAW,EAAE,KAAK,MAAM,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,YAAY,CAAC;oCAC/F,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;oCAE1C,MAAM,aAAa,GAAG;wCAClB,GAAG,EAAE,2BAA2B;wCAChC,MAAM;wCACN,QAAQ;wCACR,QAAQ,EAAE,SAAS;qCACtB,CAAC;oCAEF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;oCAChE,MAAM,CAAC,IAAI
,CAAC,uBAAuB,QAAQ,KAAK,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,GAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gCACzF,CAAC;4BACL,CAAC;4BAAC,OAAO,KAAK,EAAE,CAAC;gCACb,MAAM,CAAC,KAAK,CAAC,yBAAyB,SAAS,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;4BACtE,CAAC;wBACL,CAAC;oBACL,CAAC;yBAAM,IAAI,OAAO,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;wBAC3C,MAAM,CAAC,IAAI,CAAC,qCAAqC,CAAC,CAAC;wBACnD,MAAM,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC;wBAClC,IAAI,YAAY,IAAI,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC;4BACzC,IAAI,CAAC;gCACD,IAAI,MAAM,CAAC,YAAY,CAAC,EAAE,CAAC;oCACvB,UAAU,CAAC,YAAY,CAAC,CAAC;oCACzB,MAAM,CAAC,IAAI,CAAC,gCAAgC,YAAY,EAAE,CAAC,CAAC;oCAC5D,MAAM,eAAe,GAAG;wCACpB,GAAG,EAAE,2BAA2B;wCAChC,IAAI,EAAE,YAAY;qCACrB,CAAC;oCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC,CAAC;gCACtE,CAAC;qCAAM,CAAC;oCACJ,MAAM,CAAC,IAAI,CAAC,mCAAmC,YAAY,EAAE,CAAC,CAAC;oCAC/D,MAAM,aAAa,GAAG;wCAClB,GAAG,EAAE,qBAAqB;wCAC1B,IAAI,EAAE,YAAY;wCAClB,KAAK,EAAE,2BAA2B;qCACrC,CAAC;oCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;gCACpE,CAAC;4BACL,CAAC;4BAAC,OAAO,KAAK,EAAE,CAAC;gCACb,MAAM,CAAC,KAAK,CAAC,4BAA4B,YAAY,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;gCACxE,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,qBAAqB;oCAC1B,IAAI,EAAE,YAAY;oCAClB,KAAK,EAAE,KAAK,CAAC,OAAO;iCACvB,CAAC;gCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;4BACpE,CAAC;wBACL,CAAC;6BAAM,CAAC;4BACJ,MAAM,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;wBACtE,CAAC;oBACL,CAAC;yBAAM,IAAI,OAAO,CAAC,IAAI,KAAK,kBAAkB,EAAE,CAAC;wBAC7C,MAAM,CAAC,IAAI,CAAC,yCAAyC,CAAC,CAAC;wBAEvD,yDAAyD;wBACzD,MAAM,SAAS,GAAG,OAAO,CAAC,MAAM,CAAC;wBACjC,MAAM,QAAQ,GAAG,OAAO,CAAC,KAAK,IAAI,EAAE,CAAC;wBACrC,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC;wBAE3B,2EAA2E;wBAC3E,IAAI,CAAC;4BAED,MAAM,YAAY,GAAG,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;4BAC9D,MAAM,CAAC,IAAI,CAAC,uDAAuD,YAAY,EAAE,CAAC,CAAC;4BAEnF,MAAM,CAAC,IAAI,CAAC,kCAAkC,SAAS,GAAG,CAAC,CAAC;4BAE5D,IAAI,WAAW,GAAkB,IAAI,CAAC;4BAEtC,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gCACtB,gBAAgB;gCAChB,MAAM,CAAC,IAAI,CAAC,qBAAqB,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,SAAS,GAAG,CAAC,CAAC;gCACrF,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC;oCAC7C,GAAG,IAAI;oCACP,MAAM,EAAE,SAAS;oCACjB,OAAO,EAAE,QAAQ;oCACjB,GAAG,EAAE,YAAY,CAAC,mBAAmB;iCACxC,CAAC,CAAC;gCACH,WAAW,GAAG,MAAM,SAAS,CAAC,SAAS,EAAE,QAAQ,EAAE,aAAa,CAAC,CAAC;4BACtE,CAAC;iCAAM,CAAC;gCACJ,iBAAiB;gCACjB,MAAM,CAAC,IAAI,CAAC,gCAAgC,SAAS,GAAG,CAAC,CAAC;gCAC1D,MAAM,YAAY,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC;gCACjC,OAAO,YAAY,CAAC,OAAO,CAAC;gCAC5B,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC;oCAC7C,GAAG,YAAY;oCACf,MAAM,EAAE,SAAS;oCACjB,GAAG,EAAE,YAAY,CAAC,mBAAmB;iCACxC,CAAC,CAAC;gCACH,WAAW,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,aAAa,CAAC,CAAC;4BAC9D,CAAC;4BAED,IAAI,WAAW,EAAE,CAAC;gCACd,KAAK,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;gCACjC,MAAM,CAAC,IAAI,CAAC,qBAAqB,YAAY,EAAE,CAAC,CAAC;gCAEjD,uDAAuD;gCACvD,MAAM,YAAY,GAAG,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;gCAEpD,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,2BAA2B;oCAChC,MAAM,EAAE,YAAY;oCACpB,QAAQ,EAAE,WAAW;oCACrB,QAAQ,EAAE,YAAY;iCACzB,CAAC;gCAEF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;gCAChE,MAAM,CAAC,IAAI,CAAC,kCAAkC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC;4BACjF,CAAC;iCAAM,CAAC;gCACJ,MAAM,CAAC,KAAK,CAAC,4BAA4B,CAAC,CAAC;gCAE3C,yBAAyB;gCACzB,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,kBAAkB;oCACvB,KAAK,EAAE,0BAA0B;iCACpC,CAAC;gCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;4BACpE,CAAC;wBACL,CAAC;wBAAC,OAAO,KAAK,EAAE,CAAC;4BACb,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;4BAC5E,MAAM,UAAU,GAA
G,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;4BAEpE,OAAO,CAAC,GAAG,CAAC,sBAAsB,EAAE,KAAK,EAAC,YAAY,CAAC,CAAC;4BAExD,MAAM,CAAC,KAAK,CAAC,qBAAqB,EAAE;gCAChC,OAAO,EAAE,YAAY;gCACrB,KAAK,EAAE,UAAU;gCACjB,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gCAC5C,SAAS,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;gCAChC,KAAK,EAAE,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;6BAC9C,CAAC,CAAC;4BAEH,kCAAkC;4BAClC,MAAM,aAAa,GAAG;gCAClB,GAAG,EAAE,kBAAkB;gCACvB,KAAK,EAAE,YAAY;gCACnB,OAAO,EAAE;oCACL,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;oCAC5C,SAAS,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;oCAChC,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;iCACtC;6BACJ,CAAC;4BACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;wBACpE,CAAC;oBACL,CAAC;gBACL,CAAC;gBAAC,OAAO,CAAC,EAAE,CAAC;oBACT,4CAA4C;oBAC5C,MAAM,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;oBACvD,MAAM,IAAI,IAAI,GAAG,IAAI,CAAC;gBAC1B,CAAC;YACL,CAAC;QACL,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;YACpC,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YAE5D,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACvB,IAAI,CAAC;oBACD,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;oBACpC,IAAI,UAAU,CAAC,KAAK,IAAI,UAAU,CAAC,OAAO,EAAE,CAAC;wBACzC,qCAAqC;wBAErC,gCAAgC;wBAChC,IAAI,UAAU,CAAC,OAAO,KAAK,mCAAmC;4BAC1D,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,sCAAsC,CAAC,EAAE,CAAC;4BACtE,OAAO,CAAC,wBAAwB;wBACpC,CAAC;wBAED,wCAAwC;wBACxC,IAAI,UAAU,CAAC,OAAO,KAAK,wBAAwB,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC;4BAC9E,IAAI,CAAC;gCACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gCACpD,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;oCACd,MAAM,CAAC,IAAI,CAAC,gBAAgB,OAAO,CAAC,GAAG,EAAE,EAAE;wCACvC,MAAM,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,SAAS;wCACrH,GAAG,EAAE,OAAO,CAAC,GAAG;wCAChB,KAAK,EAAE,OAAO,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC,MAAM,QAAQ,CAAC,CAAC,CAAC,SAAS;wCAC1E,SAAS,EAAE,CAAC,CAAC,OAAO,CAAC,MAAM;qCAC9B,CAAC,CAAC;oCACH,OAAO;gCACX,CAAC;4BACL,CAAC;4BAAC,OAAO,CAAC,EAAE,CAAC;gCACT,kCAAkC;4BACtC,CAAC;wBACL,CAAC;wBAED,QAAQ,UAAU,CAAC,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;4BACrC,KAAK,OAAO;gCACR,MAAM,CAAC,KAAK,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCAC1D,MAAM;4BACV,KAAK,MAAM;gCACP,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCACzD,MAAM;4BACV,KAAK,MAAM;gCACP,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCACzD,MAAM;4BACV,KAAK,OAAO;gCACR,MAAM,CAAC,KAAK,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCAC1D,MAAM;4BACV;gCACI,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;wBACjE,CAAC;oBACL,CAAC;yBAAM,CAAC;wBACJ,yDAAyD;wBACzD,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBAC5B,CAAC;gBACL,CAAC;gBAAC,OAAO,CAAC,EAAE,CAAC;oBACT,kEAAkE;oBAClE,IAAI,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC;wBAC9B,mCAAmC;wBACnC,IAAI,IAAI,CAAC,QAAQ,CAAC,kCAAkC,CAAC;4BACjD,IAAI,CAAC,QAAQ,CAAC,sCAAsC,CAAC,EAAE,CAAC;4BACxD,OAAO,CAAC,aAAa;wBACzB,CAAC;wBACD,wDAAwD;wBACxD,IAAI,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,sBAAsB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;4BACzG,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,mBAAmB,EAAE,EAAE,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;4BAClF,MAAM,C
AAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;wBACnC,CAAC;oBACL,CAAC;yBAAM,IAAI,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC;wBACrB,gCAAgC;wBAChC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBAC5B,CAAC;gBACL,CAAC;YACL,CAAC;YACD,WAAW,IAAI,KAAK,CAAC;QACzB,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAI,EAAE,EAAE;YAC9B,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,IAAI,CAAC,CAAC;YACnD,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;YACrD,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC,CAAC;YAE1D,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;gBACb,MAAM,aAAa,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;gBACpC,MAAM,CAAC,IAAI,CAAC,2BAA2B,EAAE,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC;gBACxE,QAAQ,CAAC,aAAa,IAAI,IAAI,CAAC,CAAC;YACpC,CAAC;iBAAM,CAAC;gBACJ,MAAM,CAAC,IAAI,KAAK,CAAC,8BAA8B,IAAI,aAAa,WAAW,EAAE,CAAC,CAAC,CAAC;YACpF,CAAC;QACL,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YAC7B,MAAM,CAAC,GAAG,CAAC,CAAC;QAChB,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;AACP,CAAC;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,KAAK,EAAE,IAAS,EAAE,EAAE;IAC5C,MAAM,MAAM,GAAG,IAAI,MAAM,CAAU,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,IAAI,CAAC,EAAE,CAAC,CAAC;IAErE,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;QACX,IAAI,CAAC;YACD,MAAM,SAAS,GAAG,MAAM,qBAAqB,CAAC,IAAI,CAAC,CAAC;YACpD,IAAI,SAAS,EAAE,CAAC;gBACZ,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;gBACtC,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;gBAC7B,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;oBAC5C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC;gBACjC,CAAC;gBACD,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;oBACd,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;gBAC3B,CAAC;YACL,CAAC;iBAAM,CAAC;gBACJ,MAAM,CAAC,IAAI,CAAC,wCAAwC,CAAC,CAAC;gBACtD,OAAO;YACX,CAAC;QACL,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,MAAM,CAAC,KAAK,CAAC,oBAAoB,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;YAClD,OAAO;QACX,CAAC;IACL,CAAC;IAED,IAAI,IAAI,CAAC,OAAO,IAAI,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAED,IAAI,CAAC;QACD,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QACvD,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,GAAG,IAAI,EAAE,GAAG,aAAa,CAAC;QAEhD,MAAM,aAAa,GAAG,MAAM,aAAa,CAAC,aAAa,CAAC,CAAC;QACzD,MAAM,MAAM,GAAG,aAAa,EAAE,OAAiB,IAAI,EAAE,CAAC;QAEtD,IAAI,CAAC,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;YACtB,MAAM,CAAC,KAAK,CAAC,yFAAyF,CAAC,CAAC;YACxG,OAAO;QACX,CAAC;QAED,IAAI,CAAC,GAAG,EAAE,CAAC;YACP,MAAM,CAAC,KAAK,CAAC,oDAAoD,CAAC,CAAC;YACnE,OAAO;QACX,CAAC;QAED,IAAI,WAAW,GAAkB,IAAI,CAAC;QAEtC,IAAI,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACpD,gBAAgB;YAChB,KAAK,MAAM,SAAS,IAAI,OAAO,EAAE,CAAC;gBAC9B,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;oBACrB,MAAM,CAAC,KAAK,CAAC,6BAA6B,SAAS,EAAE,CAAC,CAAC;oBACvD,OAAO;gBACX,CAAC;YACL,CAAC;YACD,IAAI,CAAC,MAAM,EAAE,CAAC;gBACV,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;gBACxD,OAAO;YACX,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,qBAAqB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,MAAM,GAAG,CAAC,CAAC;YACjF,WAAW,GAAG,MAAM,SAAS,CAAC,MAAM,EAAE,OAAO,EAAE,aAAa,CAAC,CAAC;QAClE,CAAC;aAAM,IAAI,MAAM,EAAE,CAAC;YAChB,iBAAiB;YACjB,MAAM,CAAC,IAAI,CAAC,gCAAgC,MAAM,GAAG,CAAC,CAAC;YACvD,WAAW,GAAG,MAAM,WAAW,CAAC,MAAM,EAAE,aAAa,CAAC,CAAC;QAC3D,CAAC;QAED,IAAI,WAAW,EAAE,CAAC;YACd,MAAM,IAAI,GAAG,SAAS,CAAC,aAAa,CAAC,CAAC;YACtC,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC;YACpE,KAAK,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;YAC5B,MAAM,CAAC,IAAI,CAAC,mBAAmB,OAAO,EAAE,CAAC,CAAC;QAC9C,CAAC;aAAM,CAAC;YACJ,MAAM,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAC;QAC9C,CAAC;IAEL,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACb,MAAM,CAAC,KAAK,CAAC,4CAA4C,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,KAA
K,CAAC,KAAK,CAAC,CAAC;IACzG,CAAC;AACL,CAAC,CAAC"}
\ No newline at end of file
diff --git a/packages/kbot/dist-in/data/openai_models.json b/packages/kbot/dist-in/data/openai_models.json
index 30027d56..f0870d6b 100644
--- a/packages/kbot/dist-in/data/openai_models.json
+++ b/packages/kbot/dist-in/data/openai_models.json
@@ -1,5 +1,5 @@
{
- "timestamp": 1758470050446,
+ "timestamp": 1760432036753,
"models": [
{
"id": "gpt-4-0613",
@@ -20,33 +20,33 @@
"owned_by": "openai"
},
{
- "id": "gpt-audio",
+ "id": "sora-2-pro",
"object": "model",
- "created": 1756339249,
+ "created": 1759708663,
"owned_by": "system"
},
{
- "id": "gpt-5-nano",
+ "id": "gpt-audio-mini-2025-10-06",
"object": "model",
- "created": 1754426384,
+ "created": 1759512137,
"owned_by": "system"
},
{
- "id": "gpt-audio-2025-08-28",
+ "id": "gpt-realtime-mini",
"object": "model",
- "created": 1756256146,
+ "created": 1759517133,
"owned_by": "system"
},
{
- "id": "gpt-realtime",
+ "id": "gpt-realtime-mini-2025-10-06",
"object": "model",
- "created": 1756271701,
+ "created": 1759517175,
"owned_by": "system"
},
{
- "id": "gpt-realtime-2025-08-28",
+ "id": "sora-2",
"object": "model",
- "created": 1756271773,
+ "created": 1759708615,
"owned_by": "system"
},
{
@@ -493,6 +493,66 @@
"created": 1754426303,
"owned_by": "system"
},
+ {
+ "id": "gpt-5-nano",
+ "object": "model",
+ "created": 1754426384,
+ "owned_by": "system"
+ },
+ {
+ "id": "gpt-audio-2025-08-28",
+ "object": "model",
+ "created": 1756256146,
+ "owned_by": "system"
+ },
+ {
+ "id": "gpt-realtime",
+ "object": "model",
+ "created": 1756271701,
+ "owned_by": "system"
+ },
+ {
+ "id": "gpt-realtime-2025-08-28",
+ "object": "model",
+ "created": 1756271773,
+ "owned_by": "system"
+ },
+ {
+ "id": "gpt-audio",
+ "object": "model",
+ "created": 1756339249,
+ "owned_by": "system"
+ },
+ {
+ "id": "gpt-5-codex",
+ "object": "model",
+ "created": 1757527818,
+ "owned_by": "system"
+ },
+ {
+ "id": "gpt-image-1-mini",
+ "object": "model",
+ "created": 1758845821,
+ "owned_by": "system"
+ },
+ {
+ "id": "gpt-5-pro-2025-10-06",
+ "object": "model",
+ "created": 1759469707,
+ "owned_by": "system"
+ },
+ {
+ "id": "gpt-5-pro",
+ "object": "model",
+ "created": 1759469822,
+ "owned_by": "system"
+ },
+ {
+ "id": "gpt-audio-mini",
+ "object": "model",
+ "created": 1759512027,
+ "owned_by": "system"
+ },
{
"id": "gpt-3.5-turbo-16k",
"object": "model",
diff --git a/packages/kbot/dist-in/data/openrouter_models.json b/packages/kbot/dist-in/data/openrouter_models.json
index f063ea98..73d129b6 100644
--- a/packages/kbot/dist-in/data/openrouter_models.json
+++ b/packages/kbot/dist-in/data/openrouter_models.json
@@ -1,13 +1,1095 @@
{
- "timestamp": 1758470050775,
+ "timestamp": 1760432037245,
"models": [
{
- "id": "x-ai/grok-4-fast:free",
+ "id": "inclusionai/ling-1t",
+ "canonical_slug": "inclusionai/ling-1t",
+ "hugging_face_id": "inclusionAI/Ling-1T",
+ "name": "inclusionAI: Ling-1T",
+ "created": 1760316076,
+ "description": "Ling-1T is a trillion-parameter open-weight large language model developed by inclusionAI and released under the MIT license. It represents the first flagship non-thinking model in the Ling 2.0 series, built around a sparse-activation architecture with roughly 50 billion active parameters per token. The model supports up to 128 K tokens of context and emphasizes efficient reasoning through an “Evolutionary Chain-of-Thought (Evo-CoT)” training strategy.\n\nPre-trained on more than 20 trillion reasoning-dense tokens, Ling-1T achieves strong results across code generation, mathematics, and logical reasoning benchmarks while maintaining high inference efficiency. It employs FP8 mixed-precision training, MoE routing with QK normalization, and MTP layers for compositional reasoning stability. The model also introduces LPO (Linguistics-unit Policy Optimization) for post-training alignment, enhancing sentence-level semantic control.\n\nLing-1T can perform complex text generation, multilingual reasoning, and front-end code synthesis with a focus on both functionality and aesthetics.",
+ "context_length": 131072,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Other",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.000001",
+ "completion": "0.000003",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 131072,
+ "max_completion_tokens": 131072,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.7,
+ "top_p": 0.8,
+ "frequency_penalty": 1.05
+ }
+ },
+ {
+ "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
+ "canonical_slug": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
+ "hugging_face_id": "nvidia/Llama-3_3-Nemotron-Super-49B-v1_5",
+ "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
+ "created": 1760101395,
+ "description": "Llama-3.3-Nemotron-Super-49B-v1.5 is a 49B-parameter, English-centric reasoning/chat model derived from Meta’s Llama-3.3-70B-Instruct with a 128K context. It’s post-trained for agentic workflows (RAG, tool calling) via SFT across math, code, science, and multi-turn chat, followed by multiple RL stages; Reward-aware Preference Optimization (RPO) for alignment, RL with Verifiable Rewards (RLVR) for step-wise reasoning, and iterative DPO to refine tool-use behavior. A distillation-driven Neural Architecture Search (“Puzzle”) replaces some attention blocks and varies FFN widths to shrink memory footprint and improve throughput, enabling single-GPU (H100/H200) deployment while preserving instruction following and CoT quality.\n\nIn internal evaluations (NeMo-Skills, up to 16 runs, temp = 0.6, top_p = 0.95), the model reports strong reasoning/coding results, e.g., MATH500 pass@1 = 97.4, AIME-2024 = 87.5, AIME-2025 = 82.71, GPQA = 71.97, LiveCodeBench (24.10–25.02) = 73.58, and MMLU-Pro (CoT) = 79.53. The model targets practical inference efficiency (high tokens/s, reduced VRAM) with Transformers/vLLM support and explicit “reasoning on/off” modes (chat-first defaults, greedy recommended when disabled). Suitable for building agents, assistants, and long-context retrieval systems where balanced accuracy-to-cost and reliable tool use matter.\n",
+ "context_length": 131072,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Llama3",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000001",
+ "completion": "0.0000004",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 131072,
+ "max_completion_tokens": null,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "include_reasoning",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "reasoning",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": null
+ },
+ {
+ "id": "baidu/ernie-4.5-21b-a3b-thinking",
+ "canonical_slug": "baidu/ernie-4.5-21b-a3b-thinking",
+ "hugging_face_id": "baidu/ERNIE-4.5-21B-A3B-Thinking",
+ "name": "Baidu: ERNIE 4.5 21B A3B Thinking",
+ "created": 1760048887,
+ "description": "ERNIE-4.5-21B-A3B-Thinking is Baidu's upgraded lightweight MoE model, refined to boost reasoning depth and quality for top-tier performance in logical puzzles, math, science, coding, text generation, and expert-level academic benchmarks.",
+ "context_length": 131072,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Other",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00000007",
+ "completion": "0.00000028",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 131072,
+ "max_completion_tokens": 65536,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "include_reasoning",
+ "max_tokens",
+ "presence_penalty",
+ "reasoning",
+ "repetition_penalty",
+ "seed",
+ "stop",
+ "temperature",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.6,
+ "top_p": 0.95,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "google/gemini-2.5-flash-image",
+ "canonical_slug": "google/gemini-2.5-flash-image",
+ "hugging_face_id": "",
+ "name": "Google: Gemini 2.5 Flash Image (Nano Banana)",
+ "created": 1759870431,
+ "description": "Gemini 2.5 Flash Image, a.k.a. \"Nano Banana,\" is now generally available. It is a state of the art image generation model with contextual understanding. It is capable of image generation, edits, and multi-turn conversations. Aspect ratios can be controlled with the [image_config API Parameter](https://openrouter.ai/docs/features/multimodal/image-generation#image-aspect-ratio-configuration)",
+ "context_length": 32768,
+ "architecture": {
+ "modality": "text+image->text+image",
+ "input_modalities": [
+ "image",
+ "text"
+ ],
+ "output_modalities": [
+ "image",
+ "text"
+ ],
+ "tokenizer": "Gemini",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000003",
+ "completion": "0.0000025",
+ "request": "0",
+ "image": "0.001238",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 32768,
+ "max_completion_tokens": 8192,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "max_tokens",
+ "response_format",
+ "seed",
+ "structured_outputs",
+ "temperature",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "qwen/qwen3-vl-30b-a3b-thinking",
+ "canonical_slug": "qwen/qwen3-vl-30b-a3b-thinking",
+ "hugging_face_id": "Qwen/Qwen3-VL-30B-A3B-Thinking",
+ "name": "Qwen: Qwen3 VL 30B A3B Thinking",
+ "created": 1759794479,
+ "description": "Qwen3-VL-30B-A3B-Thinking is a multimodal model that unifies strong text generation with visual understanding for images and videos. Its Thinking variant enhances reasoning in STEM, math, and complex tasks. It excels in perception of real-world/synthetic categories, 2D/3D spatial grounding, and long-form visual comprehension, achieving competitive multimodal benchmark results. For agentic use, it handles multi-image multi-turn instructions, video timeline alignments, GUI automation, and visual coding from sketches to debugged UI. Text performance matches flagship Qwen3 models, suiting document AI, OCR, UI assistance, spatial tasks, and agent research.",
+ "context_length": 262144,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "text",
+ "image"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Qwen3",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00000029",
+ "completion": "0.000001",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 262144,
+ "max_completion_tokens": 262144,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "include_reasoning",
+ "logit_bias",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "reasoning",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.8,
+ "top_p": 0.95
+ }
+ },
+ {
+ "id": "qwen/qwen3-vl-30b-a3b-instruct",
+ "canonical_slug": "qwen/qwen3-vl-30b-a3b-instruct",
+ "hugging_face_id": "Qwen/Qwen3-VL-30B-A3B-Instruct",
+ "name": "Qwen: Qwen3 VL 30B A3B Instruct",
+ "created": 1759794476,
+ "description": "Qwen3-VL-30B-A3B-Instruct is a multimodal model that unifies strong text generation with visual understanding for images and videos. Its Instruct variant optimizes instruction-following for general multimodal tasks. It excels in perception of real-world/synthetic categories, 2D/3D spatial grounding, and long-form visual comprehension, achieving competitive multimodal benchmark results. For agentic use, it handles multi-image multi-turn instructions, video timeline alignments, GUI automation, and visual coding from sketches to debugged UI. Text performance matches flagship Qwen3 models, suiting document AI, OCR, UI assistance, spatial tasks, and agent research.",
+ "context_length": 262144,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "text",
+ "image"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Qwen3",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00000029",
+ "completion": "0.000001",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 262144,
+ "max_completion_tokens": 262144,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.7,
+ "top_p": 0.8
+ }
+ },
+ {
+ "id": "openai/gpt-5-pro",
+ "canonical_slug": "openai/gpt-5-pro-2025-10-06",
+ "hugging_face_id": "",
+ "name": "OpenAI: GPT-5 Pro",
+ "created": 1759776663,
+ "description": "GPT-5 Pro is OpenAI’s most advanced model, offering major improvements in reasoning, code quality, and user experience. It is optimized for complex tasks that require step-by-step reasoning, instruction following, and accuracy in high-stakes use cases. It supports test-time routing features and advanced prompt understanding, including user-specified intent like \"think hard about this.\" Improvements include reductions in hallucination, sycophancy, and better performance in coding, writing, and health-related tasks.",
+ "context_length": 400000,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "image",
+ "text",
+ "file"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "GPT",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.000015",
+ "completion": "0.00012",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 400000,
+ "max_completion_tokens": 128000,
+ "is_moderated": true
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "include_reasoning",
+ "max_tokens",
+ "reasoning",
+ "response_format",
+ "seed",
+ "structured_outputs",
+ "tool_choice",
+ "tools"
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "z-ai/glm-4.6",
+ "canonical_slug": "z-ai/glm-4.6",
+ "hugging_face_id": "",
+ "name": "Z.AI: GLM 4.6",
+ "created": 1759235576,
+ "description": "Compared with GLM-4.5, this generation brings several key improvements:\n\nLonger context window: The context window has been expanded from 128K to 200K tokens, enabling the model to handle more complex agentic tasks.\nSuperior coding performance: The model achieves higher scores on code benchmarks and demonstrates better real-world performance in applications such as Claude Code、Cline、Roo Code and Kilo Code, including improvements in generating visually polished front-end pages.\nAdvanced reasoning: GLM-4.6 shows a clear improvement in reasoning performance and supports tool use during inference, leading to stronger overall capability.\nMore capable agents: GLM-4.6 exhibits stronger performance in tool using and search-based agents, and integrates more effectively within agent frameworks.\nRefined writing: Better aligns with human preferences in style and readability, and performs more naturally in role-playing scenarios.",
+ "context_length": 202752,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Other",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000005",
+ "completion": "0.00000175",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 202752,
+ "max_completion_tokens": 202752,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "include_reasoning",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "reasoning",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_a",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.6,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "anthropic/claude-sonnet-4.5",
+ "canonical_slug": "anthropic/claude-4.5-sonnet-20250929",
+ "hugging_face_id": "",
+ "name": "Anthropic: Claude Sonnet 4.5",
+ "created": 1759161676,
+ "description": "Claude Sonnet 4.5 is Anthropic’s most advanced Sonnet model to date, optimized for real-world agents and coding workflows. It delivers state-of-the-art performance on coding benchmarks such as SWE-bench Verified, with improvements across system design, code security, and specification adherence. The model is designed for extended autonomous operation, maintaining task continuity across sessions and providing fact-based progress tracking.\n\nSonnet 4.5 also introduces stronger agentic capabilities, including improved tool orchestration, speculative parallel execution, and more efficient context and memory management. With enhanced context tracking and awareness of token usage across tool calls, it is particularly well-suited for multi-context and long-running workflows. Use cases span software engineering, cybersecurity, financial analysis, research agents, and other domains requiring sustained reasoning and tool use.",
+ "context_length": 1000000,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "text",
+ "image",
+ "file"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Claude",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.000003",
+ "completion": "0.000015",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 1000000,
+ "max_completion_tokens": 64000,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "include_reasoning",
+ "max_tokens",
+ "reasoning",
+ "stop",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 1,
+ "top_p": 1,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "deepseek/deepseek-v3.2-exp",
+ "canonical_slug": "deepseek/deepseek-v3.2-exp",
+ "hugging_face_id": "deepseek-ai/DeepSeek-V3.2-Exp",
+ "name": "DeepSeek: DeepSeek V3.2 Exp",
+ "created": 1759150481,
+ "description": "DeepSeek-V3.2-Exp is an experimental large language model released by DeepSeek as an intermediate step between V3.1 and future architectures. It introduces DeepSeek Sparse Attention (DSA), a fine-grained sparse attention mechanism designed to improve training and inference efficiency in long-context scenarios while maintaining output quality. Users can control the reasoning behaviour with the `reasoning` `enabled` boolean. [Learn more in our docs](https://openrouter.ai/docs/use-cases/reasoning-tokens#enable-reasoning-with-default-config)\n\nThe model was trained under conditions aligned with V3.1-Terminus to enable direct comparison. Benchmarking shows performance roughly on par with V3.1 across reasoning, coding, and agentic tool-use tasks, with minor tradeoffs and gains depending on the domain. This release focuses on validating architectural optimizations for extended context lengths rather than advancing raw task accuracy, making it primarily a research-oriented model for exploring efficient transformer designs.",
+ "context_length": 163840,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "DeepSeek",
+ "instruct_type": "deepseek-v3.1"
+ },
+ "pricing": {
+ "prompt": "0.00000027",
+ "completion": "0.0000004",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 163840,
+ "max_completion_tokens": null,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "include_reasoning",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "reasoning",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.6,
+ "top_p": 0.95,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "thedrummer/cydonia-24b-v4.1",
+ "canonical_slug": "thedrummer/cydonia-24b-v4.1",
+ "hugging_face_id": "thedrummer/cydonia-24b-v4.1",
+ "name": "TheDrummer: Cydonia 24B V4.1",
+ "created": 1758931878,
+ "description": "Uncensored and creative writing model based on Mistral Small 3.2 24B with good recall, prompt adherence, and intelligence.",
+ "context_length": 131072,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Other",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000003",
+ "completion": "0.0000005",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 131072,
+ "max_completion_tokens": 131072,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "seed",
+ "stop",
+ "temperature",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "relace/relace-apply-3",
+ "canonical_slug": "relace/relace-apply-3",
+ "hugging_face_id": "",
+ "name": "Relace: Relace Apply 3",
+ "created": 1758891572,
+ "description": "Relace Apply 3 is a specialized code-patching LLM that merges AI-suggested edits straight into your source files. It can apply updates from GPT-4o, Claude, and others into your files at 7,500 tokens/sec on average.\n\nThe model requires the prompt to be in the following format: \n{instruction}\n{initial_code}\n{edit_snippet}\n\nZero Data Retention is enabled for Relace. Learn more about this model in their [documentation](https://docs.relace.ai/api-reference/instant-apply/apply)",
+ "context_length": 256000,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Other",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00000085",
+ "completion": "0.00000125",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 256000,
+ "max_completion_tokens": 128000,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "max_tokens",
+ "seed",
+ "stop"
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "google/gemini-2.5-flash-preview-09-2025",
+ "canonical_slug": "google/gemini-2.5-flash-preview-09-2025",
+ "hugging_face_id": "",
+ "name": "Google: Gemini 2.5 Flash Preview 09-2025",
+ "created": 1758820178,
+ "description": "Gemini 2.5 Flash Preview September 2025 Checkpoint is Google's state-of-the-art workhorse model, specifically designed for advanced reasoning, coding, mathematics, and scientific tasks. It includes built-in \"thinking\" capabilities, enabling it to provide responses with greater accuracy and nuanced context handling. \n\nAdditionally, Gemini 2.5 Flash is configurable through the \"max tokens for reasoning\" parameter, as described in the documentation (https://openrouter.ai/docs/use-cases/reasoning-tokens#max-tokens-for-reasoning).",
+ "context_length": 1048576,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "image",
+ "file",
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Gemini",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000003",
+ "completion": "0.0000025",
+ "request": "0",
+ "image": "0.001238",
+ "web_search": "0",
+ "internal_reasoning": "0",
+ "input_cache_read": "0.000000075",
+ "input_cache_write": "0.0000003833"
+ },
+ "top_provider": {
+ "context_length": 1048576,
+ "max_completion_tokens": 65536,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "include_reasoning",
+ "max_tokens",
+ "reasoning",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "google/gemini-2.5-flash-lite-preview-09-2025",
+ "canonical_slug": "google/gemini-2.5-flash-lite-preview-09-2025",
+ "hugging_face_id": "",
+ "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
+ "created": 1758819686,
+ "description": "Gemini 2.5 Flash-Lite is a lightweight reasoning model in the Gemini 2.5 family, optimized for ultra-low latency and cost efficiency. It offers improved throughput, faster token generation, and better performance across common benchmarks compared to earlier Flash models. By default, \"thinking\" (i.e. multi-pass reasoning) is disabled to prioritize speed, but developers can enable it via the [Reasoning API parameter](https://openrouter.ai/docs/use-cases/reasoning-tokens) to selectively trade off cost for intelligence. ",
+ "context_length": 1048576,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "file",
+ "image",
+ "text",
+ "audio"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Gemini",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000001",
+ "completion": "0.0000004",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 1048576,
+ "max_completion_tokens": 65536,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "include_reasoning",
+ "max_tokens",
+ "reasoning",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "qwen/qwen3-vl-235b-a22b-thinking",
+ "canonical_slug": "qwen/qwen3-vl-235b-a22b-thinking",
+ "hugging_face_id": "Qwen/Qwen3-VL-235B-A22B-Thinking",
+ "name": "Qwen: Qwen3 VL 235B A22B Thinking",
+ "created": 1758668690,
+ "description": "Qwen3-VL-235B-A22B Thinking is a multimodal model that unifies strong text generation with visual understanding across images and video. The Thinking model is optimized for multimodal reasoning in STEM and math. The series emphasizes robust perception (recognition of diverse real-world and synthetic categories), spatial understanding (2D/3D grounding), and long-form visual comprehension, with competitive results on public multimodal benchmarks for both perception and reasoning.\n\nBeyond analysis, Qwen3-VL supports agentic interaction and tool use: it can follow complex instructions over multi-image, multi-turn dialogues; align text to video timelines for precise temporal queries; and operate GUI elements for automation tasks. The models also enable visual coding workflows, turning sketches or mockups into code and assisting with UI debugging, while maintaining strong text-only performance comparable to the flagship Qwen3 language models. This makes Qwen3-VL suitable for production scenarios spanning document AI, multilingual OCR, software/UI assistance, spatial/embodied tasks, and research on vision-language agents.",
+ "context_length": 262144,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "text",
+ "image"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Qwen3",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00000045",
+ "completion": "0.0000035",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 262144,
+ "max_completion_tokens": 262144,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "include_reasoning",
+ "logit_bias",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "reasoning",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.8,
+ "top_p": 0.95,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "qwen/qwen3-vl-235b-a22b-instruct",
+ "canonical_slug": "qwen/qwen3-vl-235b-a22b-instruct",
+ "hugging_face_id": "Qwen/Qwen3-VL-235B-A22B-Instruct",
+ "name": "Qwen: Qwen3 VL 235B A22B Instruct",
+ "created": 1758668687,
+ "description": "Qwen3-VL-235B-A22B Instruct is an open-weight multimodal model that unifies strong text generation with visual understanding across images and video. The Instruct model targets general vision-language use (VQA, document parsing, chart/table extraction, multilingual OCR). The series emphasizes robust perception (recognition of diverse real-world and synthetic categories), spatial understanding (2D/3D grounding), and long-form visual comprehension, with competitive results on public multimodal benchmarks for both perception and reasoning.\n\nBeyond analysis, Qwen3-VL supports agentic interaction and tool use: it can follow complex instructions over multi-image, multi-turn dialogues; align text to video timelines for precise temporal queries; and operate GUI elements for automation tasks. The models also enable visual coding workflows—turning sketches or mockups into code and assisting with UI debugging—while maintaining strong text-only performance comparable to the flagship Qwen3 language models. This makes Qwen3-VL suitable for production scenarios spanning document AI, multilingual OCR, software/UI assistance, spatial/embodied tasks, and research on vision-language agents.",
+ "context_length": 131072,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "text",
+ "image"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Qwen3",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000003",
+ "completion": "0.0000012",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 131072,
+ "max_completion_tokens": null,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "include_reasoning",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "reasoning",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.7,
+ "top_p": 0.8,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "qwen/qwen3-max",
+ "canonical_slug": "qwen/qwen3-max",
+ "hugging_face_id": "",
+ "name": "Qwen: Qwen3 Max",
+ "created": 1758662808,
+ "description": "Qwen3-Max is an updated release built on the Qwen3 series, offering major improvements in reasoning, instruction following, multilingual support, and long-tail knowledge coverage compared to the January 2025 version. It delivers higher accuracy in math, coding, logic, and science tasks, follows complex instructions in Chinese and English more reliably, reduces hallucinations, and produces higher-quality responses for open-ended Q&A, writing, and conversation. The model supports over 100 languages with stronger translation and commonsense reasoning, and is optimized for retrieval-augmented generation (RAG) and tool calling, though it does not include a dedicated “thinking” mode.",
+ "context_length": 256000,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Qwen3",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000012",
+ "completion": "0.000006",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0",
+ "input_cache_read": "0.00000024"
+ },
+ "top_provider": {
+ "context_length": 256000,
+ "max_completion_tokens": 32768,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "max_tokens",
+ "presence_penalty",
+ "response_format",
+ "seed",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_p"
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "qwen/qwen3-coder-plus",
+ "canonical_slug": "qwen/qwen3-coder-plus",
+ "hugging_face_id": "",
+ "name": "Qwen: Qwen3 Coder Plus",
+ "created": 1758662707,
+ "description": "Qwen3 Coder Plus is Alibaba's proprietary version of the Open Source Qwen3 Coder 480B A35B. It is a powerful coding agent model specializing in autonomous programming via tool calling and environment interaction, combining coding proficiency with versatile general-purpose abilities.",
+ "context_length": 128000,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Qwen3",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.000001",
+ "completion": "0.000005",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0",
+ "input_cache_read": "0.0000001"
+ },
+ "top_provider": {
+ "context_length": 128000,
+ "max_completion_tokens": 65536,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "max_tokens",
+ "presence_penalty",
+ "response_format",
+ "seed",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "openai/gpt-5-codex",
+ "canonical_slug": "openai/gpt-5-codex",
+ "hugging_face_id": "",
+ "name": "OpenAI: GPT-5 Codex",
+ "created": 1758643403,
+ "description": "GPT-5-Codex is a specialized version of GPT-5 optimized for software engineering and coding workflows. It is designed for both interactive development sessions and long, independent execution of complex engineering tasks. The model supports building projects from scratch, feature development, debugging, large-scale refactoring, and code review. Compared to GPT-5, Codex is more steerable, adheres closely to developer instructions, and produces cleaner, higher-quality code outputs. Reasoning effort can be adjusted with the `reasoning.effort` parameter. Read the [docs here](https://openrouter.ai/docs/use-cases/reasoning-tokens#reasoning-effort-level)\n\nCodex integrates into developer environments including the CLI, IDE extensions, GitHub, and cloud tasks. It adapts reasoning effort dynamically—providing fast responses for small tasks while sustaining extended multi-hour runs for large projects. The model is trained to perform structured code reviews, catching critical flaws by reasoning over dependencies and validating behavior against tests. It also supports multimodal inputs such as images or screenshots for UI development and integrates tool use for search, dependency installation, and environment setup. Codex is intended specifically for agentic coding applications.",
+ "context_length": 400000,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "text",
+ "image"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "GPT",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00000125",
+ "completion": "0.00001",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0",
+ "input_cache_read": "0.000000125"
+ },
+ "top_provider": {
+ "context_length": 400000,
+ "max_completion_tokens": 128000,
+ "is_moderated": true
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "include_reasoning",
+ "max_tokens",
+ "reasoning",
+ "response_format",
+ "seed",
+ "structured_outputs",
+ "tool_choice",
+ "tools"
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "deepseek/deepseek-v3.1-terminus",
+ "canonical_slug": "deepseek/deepseek-v3.1-terminus",
+ "hugging_face_id": "deepseek-ai/DeepSeek-V3.1-Terminus",
+ "name": "DeepSeek: DeepSeek V3.1 Terminus",
+ "created": 1758548275,
+ "description": "DeepSeek-V3.1 Terminus is an update to [DeepSeek V3.1](/deepseek/deepseek-chat-v3.1) that maintains the model's original capabilities while addressing issues reported by users, including language consistency and agent capabilities, further optimizing the model's performance in coding and search agents. It is a large hybrid reasoning model (671B parameters, 37B active) that supports both thinking and non-thinking modes. It extends the DeepSeek-V3 base with a two-phase long-context training process, reaching up to 128K tokens, and uses FP8 microscaling for efficient inference. Users can control the reasoning behaviour with the `reasoning` `enabled` boolean. [Learn more in our docs](https://openrouter.ai/docs/use-cases/reasoning-tokens#enable-reasoning-with-default-config)\n\nThe model improves tool use, code generation, and reasoning efficiency, achieving performance comparable to DeepSeek-R1 on difficult benchmarks while responding more quickly. It supports structured tool calling, code agents, and search agents, making it suitable for research, coding, and agentic workflows. ",
+ "context_length": 163840,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "DeepSeek",
+ "instruct_type": "deepseek-v3.1"
+ },
+ "pricing": {
+ "prompt": "0.00000023",
+ "completion": "0.0000009",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 163840,
+ "max_completion_tokens": 163840,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "include_reasoning",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "reasoning",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "x-ai/grok-4-fast",
"canonical_slug": "x-ai/grok-4-fast",
"hugging_face_id": "",
- "name": "xAI: Grok 4 Fast (free)",
+ "name": "xAI: Grok 4 Fast",
"created": 1758240090,
- "description": "Grok 4 Fast is xAI's latest multimodal model with SOTA cost-efficiency and a 2M token context window. It comes in two flavors: non-reasoning and reasoning. Read more about the model on xAI's [news post](http://x.ai/news/grok-4-fast). Reasoning can be enabled using the `reasoning` `enabled` parameter in the API. [Learn more in our docs](https://openrouter.ai/docs/use-cases/reasoning-tokens#controlling-reasoning-tokens)\n\nPrompts and completions may be used by xAI or OpenRouter to improve future models.",
+ "description": "Grok 4 Fast is xAI's latest multimodal model with SOTA cost-efficiency and a 2M token context window. It comes in two flavors: non-reasoning and reasoning. Read more about the model on xAI's [news post](http://x.ai/news/grok-4-fast). Reasoning can be enabled using the `reasoning` `enabled` parameter in the API. [Learn more in our docs](https://openrouter.ai/docs/use-cases/reasoning-tokens#controlling-reasoning-tokens)\n\nPrompts and completions on Grok 4 Fast Free may be used by xAI or OpenRouter to improve future models.",
"context_length": 2000000,
"architecture": {
"modality": "text+image->text",
@@ -22,12 +1104,13 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0",
- "completion": "0",
+ "prompt": "0.0000002",
+ "completion": "0.0000005",
"request": "0",
"image": "0",
"web_search": "0",
- "internal_reasoning": "0"
+ "internal_reasoning": "0",
+ "input_cache_read": "0.00000005"
},
"top_provider": {
"context_length": 2000000,
@@ -48,7 +1131,70 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
+ },
+ {
+ "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
+ "canonical_slug": "alibaba/tongyi-deepresearch-30b-a3b",
+ "hugging_face_id": "Alibaba-NLP/Tongyi-DeepResearch-30B-A3B",
+ "name": "Tongyi DeepResearch 30B A3B (free)",
+ "created": 1758210804,
+ "description": "Tongyi DeepResearch is an agentic large language model developed by Tongyi Lab, with 30 billion total parameters activating only 3 billion per token. It's optimized for long-horizon, deep information-seeking tasks and delivers state-of-the-art performance on benchmarks like Humanity's Last Exam, BrowserComp, BrowserComp-ZH, WebWalkerQA, GAIA, xbench-DeepSearch, and FRAMES. This makes it superior for complex agentic search, reasoning, and multi-step problem-solving compared to prior models.\n\nThe model includes a fully automated synthetic data pipeline for scalable pre-training, fine-tuning, and reinforcement learning. It uses large-scale continual pre-training on diverse agentic data to boost reasoning and stay fresh. It also features end-to-end on-policy RL with a customized Group Relative Policy Optimization, including token-level gradients and negative sample filtering for stable training. The model supports ReAct for core ability checks and an IterResearch-based 'Heavy' mode for max performance through test-time scaling. It's ideal for advanced research agents, tool use, and heavy inference workflows.",
+ "context_length": 131072,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Other",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0",
+ "completion": "0",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 131072,
+ "max_completion_tokens": 131072,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "alibaba/tongyi-deepresearch-30b-a3b",
@@ -71,7 +1217,7 @@
},
"pricing": {
"prompt": "0.00000009",
- "completion": "0.00000045",
+ "completion": "0.0000004",
"request": "0",
"image": "0",
"web_search": "0",
@@ -84,16 +1230,28 @@
},
"per_request_limits": null,
"supported_parameters": [
+ "frequency_penalty",
"include_reasoning",
"max_tokens",
+ "min_p",
+ "presence_penalty",
"reasoning",
+ "repetition_penalty",
"response_format",
+ "seed",
+ "stop",
"structured_outputs",
"temperature",
"tool_choice",
"tools",
+ "top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "qwen/qwen3-coder-flash",
@@ -138,53 +1296,12 @@
"tool_choice",
"tools",
"top_p"
- ]
- },
- {
- "id": "qwen/qwen3-coder-plus",
- "canonical_slug": "qwen/qwen3-coder-plus",
- "hugging_face_id": "",
- "name": "Qwen: Qwen3 Coder Plus",
- "created": 1758115194,
- "description": "Qwen3 Coder Plus is Alibaba's proprietary version of the Open Source Qwen3 Coder 480B A35B. It is a powerful coding agent model specializing in autonomous programming via tool calling and environment interaction, combining coding proficiency with versatile general-purpose abilities.",
- "context_length": 128000,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Qwen3",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.000001",
- "completion": "0.000005",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0",
- "input_cache_read": "0.0000001"
- },
- "top_provider": {
- "context_length": 128000,
- "max_completion_tokens": 65536,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "structured_outputs",
- "temperature",
- "tool_choice",
- "tools",
- "top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "arcee-ai/afm-4.5b",
@@ -206,8 +1323,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.0000001",
- "completion": "0.0000004",
+ "prompt": "0.000000048",
+ "completion": "0.00000015",
"request": "0",
"image": "0",
"web_search": "0",
@@ -232,7 +1349,12 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "opengvlab/internvl3-78b",
@@ -255,8 +1377,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000003",
- "completion": "0.00000013",
+ "prompt": "0.00000007",
+ "completion": "0.00000026",
"request": "0",
"image": "0",
"web_search": "0",
@@ -264,7 +1386,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -284,7 +1406,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-next-80b-a3b-thinking",
@@ -306,8 +1429,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.0000001",
- "completion": "0.0000008",
+ "prompt": "0.00000014",
+ "completion": "0.0000012",
"request": "0",
"image": "0",
"web_search": "0",
@@ -339,7 +1462,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-next-80b-a3b-instruct",
@@ -370,7 +1494,7 @@
},
"top_provider": {
"context_length": 262144,
- "max_completion_tokens": null,
+ "max_completion_tokens": 262144,
"is_moderated": false
},
"per_request_limits": null,
@@ -392,7 +1516,62 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "meituan/longcat-flash-chat:free",
+ "canonical_slug": "meituan/longcat-flash-chat",
+ "hugging_face_id": "meituan-longcat/LongCat-Flash-Chat",
+ "name": "Meituan: LongCat Flash Chat (free)",
+ "created": 1757427658,
+ "description": "LongCat-Flash-Chat is a large-scale Mixture-of-Experts (MoE) model with 560B total parameters, of which 18.6B–31.3B (≈27B on average) are dynamically activated per input. It introduces a shortcut-connected MoE design to reduce communication overhead and achieve high throughput while maintaining training stability through advanced scaling strategies such as hyperparameter transfer, deterministic computation, and multi-stage optimization.\n\nThis release, LongCat-Flash-Chat, is a non-thinking foundation model optimized for conversational and agentic tasks. It supports long context windows up to 128K tokens and shows competitive performance across reasoning, coding, instruction following, and domain benchmarks, with particular strengths in tool use and complex multi-step interactions.",
+ "context_length": 131072,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Other",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0",
+ "completion": "0",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 131072,
+ "max_completion_tokens": 131072,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "meituan/longcat-flash-chat",
@@ -414,8 +1593,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000012",
- "completion": "0.0000006",
+ "prompt": "0.00000015",
+ "completion": "0.00000075",
"request": "0",
"image": "0",
"web_search": "0",
@@ -423,27 +1602,16 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": null,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
"supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
"max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "seed",
- "stop",
"temperature",
- "tool_choice",
- "tools",
- "top_k",
- "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-plus-2025-07-28",
@@ -488,7 +1656,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-plus-2025-07-28:thinking",
@@ -535,7 +1704,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "nvidia/nemotron-nano-9b-v2:free",
@@ -577,7 +1747,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "nvidia/nemotron-nano-9b-v2",
@@ -615,6 +1786,7 @@
"supported_parameters": [
"frequency_penalty",
"include_reasoning",
+ "logit_bias",
"max_tokens",
"min_p",
"presence_penalty",
@@ -628,52 +1800,8 @@
"tools",
"top_k",
"top_p"
- ]
- },
- {
- "id": "qwen/qwen3-max",
- "canonical_slug": "qwen/qwen3-max",
- "hugging_face_id": "",
- "name": "Qwen: Qwen3 Max",
- "created": 1757076567,
- "description": "Qwen3-Max is an updated release built on the Qwen3 series, offering major improvements in reasoning, instruction following, multilingual support, and long-tail knowledge coverage compared to the January 2025 version. It delivers higher accuracy in math, coding, logic, and science tasks, follows complex instructions in Chinese and English more reliably, reduces hallucinations, and produces higher-quality responses for open-ended Q&A, writing, and conversation. The model supports over 100 languages with stronger translation and commonsense reasoning, and is optimized for retrieval-augmented generation (RAG) and tool calling, though it does not include a dedicated “thinking” mode.",
- "context_length": 256000,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Qwen3",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.0000012",
- "completion": "0.000006",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0",
- "input_cache_read": "0.00000024"
- },
- "top_provider": {
- "context_length": 256000,
- "max_completion_tokens": 32768,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "temperature",
- "tool_choice",
- "tools",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "moonshotai/kimi-k2-0905",
@@ -695,8 +1823,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000038",
- "completion": "0.00000152",
+ "prompt": "0.00000039",
+ "completion": "0.0000019",
"request": "0",
"image": "0",
"web_search": "0",
@@ -704,7 +1832,7 @@
},
"top_provider": {
"context_length": 262144,
- "max_completion_tokens": null,
+ "max_completion_tokens": 262144,
"is_moderated": false
},
"per_request_limits": null,
@@ -726,58 +1854,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "bytedance/seed-oss-36b-instruct",
- "canonical_slug": "bytedance/seed-oss-36b-instruct",
- "hugging_face_id": "ByteDance-Seed/Seed-OSS-36B-Instruct",
- "name": "ByteDance: Seed OSS 36B Instruct",
- "created": 1756834704,
- "description": "Seed-OSS-36B-Instruct is a 36B-parameter instruction-tuned reasoning language model from ByteDance’s Seed team, released under Apache-2.0. The model is optimized for general instruction following with strong performance in reasoning, mathematics, coding, tool use/agentic workflows, and multilingual tasks, and is intended for international (i18n) use cases. It is not currently possible to control the reasoning effort.",
- "context_length": 131072,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Other",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.00000016",
- "completion": "0.00000065",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 131072,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "include_reasoning",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "reasoning",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepcogito/cogito-v2-preview-llama-109b-moe",
@@ -828,7 +1906,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepcogito/cogito-v2-preview-deepseek-671b",
@@ -876,7 +1955,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "stepfun-ai/step3",
@@ -923,7 +2003,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-30b-a3b-thinking-2507",
@@ -978,7 +2059,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "x-ai/grok-code-fast-1",
@@ -1028,7 +2110,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "nousresearch/hermes-4-70b",
@@ -1059,7 +2142,7 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": null,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
@@ -1081,7 +2164,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "nousresearch/hermes-4-405b",
@@ -1103,8 +2187,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000024999988",
- "completion": "0.000000999999888",
+ "prompt": "0.0000003",
+ "completion": "0.0000012",
"request": "0",
"image": "0",
"web_search": "0",
@@ -1112,7 +2196,7 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": null,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
@@ -1134,15 +2218,16 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemini-2.5-flash-image-preview",
"canonical_slug": "google/gemini-2.5-flash-image-preview",
"hugging_face_id": "",
- "name": "Google: Gemini 2.5 Flash Image Preview",
+ "name": "Google: Gemini 2.5 Flash Image Preview (Nano Banana)",
"created": 1756218977,
- "description": "Gemini 2.5 Flash Image Preview, AKA Nano Banana is a state of the art image generation model with contextual understanding. It is capable of image generation, edits, and multi-turn conversations.",
+ "description": "Gemini 2.5 Flash Image Preview, a.k.a. \"Nano Banana,\" is a state of the art image generation model with contextual understanding. It is capable of image generation, edits, and multi-turn conversations.",
"context_length": 32768,
"architecture": {
"modality": "text+image->text+image",
@@ -1178,7 +2263,12 @@
"structured_outputs",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "deepseek/deepseek-chat-v3.1:free",
@@ -1187,7 +2277,7 @@
"name": "DeepSeek: DeepSeek V3.1 (free)",
"created": 1755779628,
"description": "DeepSeek-V3.1 is a large hybrid reasoning model (671B parameters, 37B active) that supports both thinking and non-thinking modes via prompt templates. It extends the DeepSeek-V3 base with a two-phase long-context training process, reaching up to 128K tokens, and uses FP8 microscaling for efficient inference. Users can control the reasoning behaviour with the `reasoning` `enabled` boolean. [Learn more in our docs](https://openrouter.ai/docs/use-cases/reasoning-tokens#enable-reasoning-with-default-config)\n\nThe model improves tool use, code generation, and reasoning efficiency, achieving performance comparable to DeepSeek-R1 on difficult benchmarks while responding more quickly. It supports structured tool calling, code agents, and search agents, making it suitable for research, coding, and agentic workflows. \n\nIt succeeds the [DeepSeek V3-0324](/deepseek/deepseek-chat-v3-0324) model and performs well on a variety of tasks.",
- "context_length": 163840,
+ "context_length": 163800,
"architecture": {
"modality": "text->text",
"input_modalities": [
@@ -1208,28 +2298,20 @@
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 163840,
+ "context_length": 163800,
"max_completion_tokens": null,
- "is_moderated": false
+ "is_moderated": true
},
"per_request_limits": null,
"supported_parameters": [
- "frequency_penalty",
"include_reasoning",
"max_tokens",
- "min_p",
- "presence_penalty",
"reasoning",
- "repetition_penalty",
- "response_format",
"seed",
"stop",
- "temperature",
- "tool_choice",
- "tools",
- "top_k",
- "top_p"
- ]
+ "temperature"
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-chat-v3.1",
@@ -1251,8 +2333,8 @@
"instruct_type": "deepseek-v3.1"
},
"pricing": {
- "prompt": "0.00000024999988",
- "completion": "0.000000999999888",
+ "prompt": "0.0000002",
+ "completion": "0.0000008",
"request": "0",
"image": "0",
"web_search": "0",
@@ -1260,7 +2342,7 @@
},
"top_provider": {
"context_length": 163840,
- "max_completion_tokens": null,
+ "max_completion_tokens": 163840,
"is_moderated": false
},
"per_request_limits": null,
@@ -1284,56 +2366,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "deepseek/deepseek-v3.1-base",
- "canonical_slug": "deepseek/deepseek-v3.1-base",
- "hugging_face_id": "deepseek-ai/DeepSeek-V3.1-Base",
- "name": "DeepSeek: DeepSeek V3.1 Base",
- "created": 1755727017,
- "description": "This is a base model, trained only for raw next-token prediction. Unlike instruct/chat models, it has not been fine-tuned to follow user instructions. Prompts need to be written more like training text or examples rather than simple requests (e.g., “Translate the following sentence…” instead of just “Translate this”).\n\nDeepSeek-V3.1 Base is a 671B parameter open Mixture-of-Experts (MoE) language model with 37B active parameters per forward pass and a context length of 128K tokens. Trained on 14.8T tokens using FP8 mixed precision, it achieves high training efficiency and stability, with strong performance across language, reasoning, math, and coding tasks. \n",
- "context_length": 163840,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "DeepSeek",
- "instruct_type": "none"
- },
- "pricing": {
- "prompt": "0.00000024999988",
- "completion": "0.000000999999888",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 163840,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4o-audio-preview",
@@ -1385,7 +2419,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-medium-3.1",
@@ -1433,7 +2468,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "baidu/ernie-4.5-21b-a3b",
@@ -1470,9 +2508,7 @@
"per_request_limits": null,
"supported_parameters": [
"frequency_penalty",
- "logit_bias",
"max_tokens",
- "min_p",
"presence_penalty",
"repetition_penalty",
"seed",
@@ -1480,7 +2516,12 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.8,
+ "top_p": 0.8,
+ "frequency_penalty": null
+ }
},
{
"id": "baidu/ernie-4.5-vl-28b-a3b",
@@ -1519,9 +2560,7 @@
"supported_parameters": [
"frequency_penalty",
"include_reasoning",
- "logit_bias",
"max_tokens",
- "min_p",
"presence_penalty",
"reasoning",
"repetition_penalty",
@@ -1530,7 +2569,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "z-ai/glm-4.5v",
@@ -1553,7 +2593,7 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.0000005",
+ "prompt": "0.0000006",
"completion": "0.0000018",
"request": "0",
"image": "0",
@@ -1562,7 +2602,7 @@
},
"top_provider": {
"context_length": 65536,
- "max_completion_tokens": 65536,
+ "max_completion_tokens": 16384,
"is_moderated": false
},
"per_request_limits": null,
@@ -1582,7 +2622,12 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.75,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "ai21/jamba-mini-1.7",
@@ -1625,7 +2670,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "ai21/jamba-large-1.7",
@@ -1668,7 +2714,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-5-chat",
@@ -1711,7 +2758,8 @@
"response_format",
"seed",
"structured_outputs"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-5",
@@ -1735,13 +2783,13 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.000000625",
- "completion": "0.000005",
+ "prompt": "0.00000125",
+ "completion": "0.00001",
"request": "0",
"image": "0",
- "web_search": "0.005",
+ "web_search": "0.01",
"internal_reasoning": "0",
- "input_cache_read": "0.0000000625"
+ "input_cache_read": "0.000000125"
},
"top_provider": {
"context_length": 400000,
@@ -1758,7 +2806,12 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "openai/gpt-5-mini",
@@ -1805,7 +2858,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-5-nano",
@@ -1852,49 +2906,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
- },
- {
- "id": "openai/gpt-oss-120b:free",
- "canonical_slug": "openai/gpt-oss-120b",
- "hugging_face_id": "openai/gpt-oss-120b",
- "name": "OpenAI: gpt-oss-120b (free)",
- "created": 1754414231,
- "description": "gpt-oss-120b is an open-weight, 117B-parameter Mixture-of-Experts (MoE) language model from OpenAI designed for high-reasoning, agentic, and general-purpose production use cases. It activates 5.1B parameters per forward pass and is optimized to run on a single H100 GPU with native MXFP4 quantization. The model supports configurable reasoning depth, full chain-of-thought access, and native tool use, including function calling, browsing, and structured output generation.",
- "context_length": 32768,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "GPT",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0",
- "completion": "0",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 32768,
- "max_completion_tokens": null,
- "is_moderated": true
- },
- "per_request_limits": null,
- "supported_parameters": [
- "include_reasoning",
- "max_tokens",
- "reasoning",
- "seed",
- "stop",
- "temperature"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-oss-120b",
@@ -1916,8 +2929,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000005",
- "completion": "0.00000025",
+ "prompt": "0.00000004",
+ "completion": "0.0000004",
"request": "0",
"image": "0",
"web_search": "0",
@@ -1925,7 +2938,7 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": null,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
@@ -1949,7 +2962,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": null
},
{
"id": "openai/gpt-oss-20b:free",
@@ -1985,14 +2999,25 @@
},
"per_request_limits": null,
"supported_parameters": [
+ "frequency_penalty",
"include_reasoning",
+ "logit_bias",
+ "logprobs",
"max_tokens",
+ "min_p",
+ "presence_penalty",
"reasoning",
+ "repetition_penalty",
"response_format",
+ "seed",
+ "stop",
"structured_outputs",
"temperature",
+ "top_k",
+ "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-oss-20b",
@@ -2015,7 +3040,7 @@
},
"pricing": {
"prompt": "0.00000003",
- "completion": "0.00000015",
+ "completion": "0.00000014",
"request": "0",
"image": "0",
"web_search": "0",
@@ -2023,7 +3048,7 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": 32768,
+ "max_completion_tokens": null,
"is_moderated": false
},
"per_request_limits": null,
@@ -2047,7 +3072,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthropic/claude-opus-4.1",
@@ -2075,7 +3101,7 @@
"completion": "0.000075",
"request": "0",
"image": "0.024",
- "web_search": "0.01",
+ "web_search": "0",
"internal_reasoning": "0",
"input_cache_read": "0.0000015",
"input_cache_write": "0.00001875"
@@ -2083,7 +3109,7 @@
"top_provider": {
"context_length": 200000,
"max_completion_tokens": 32000,
- "is_moderated": true
+ "is_moderated": false
},
"per_request_limits": null,
"supported_parameters": [
@@ -2096,7 +3122,12 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "mistralai/codestral-2508",
@@ -2143,7 +3174,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "qwen/qwen3-coder-30b-a3b-instruct",
@@ -2165,8 +3199,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000007",
- "completion": "0.00000028",
+ "prompt": "0.00000006",
+ "completion": "0.00000025",
"request": "0",
"image": "0",
"web_search": "0",
@@ -2174,7 +3208,7 @@
},
"top_provider": {
"context_length": 262144,
- "max_completion_tokens": null,
+ "max_completion_tokens": 262144,
"is_moderated": false
},
"per_request_limits": null,
@@ -2196,7 +3230,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-30b-a3b-instruct-2507",
@@ -2218,8 +3253,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000007",
- "completion": "0.00000028",
+ "prompt": "0.00000008",
+ "completion": "0.00000033",
"request": "0",
"image": "0",
"web_search": "0",
@@ -2227,7 +3262,7 @@
},
"top_provider": {
"context_length": 262144,
- "max_completion_tokens": null,
+ "max_completion_tokens": 262144,
"is_moderated": false
},
"per_request_limits": null,
@@ -2249,7 +3284,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "z-ai/glm-4.5",
@@ -2271,8 +3307,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000041",
- "completion": "0.00000165",
+ "prompt": "0.00000035",
+ "completion": "0.00000155",
"request": "0",
"image": "0",
"web_search": "0",
@@ -2280,7 +3316,7 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": null,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
@@ -2305,7 +3341,12 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.75,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "z-ai/glm-4.5-air:free",
@@ -2336,7 +3377,7 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": null,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
@@ -2350,6 +3391,7 @@
"presence_penalty",
"reasoning",
"repetition_penalty",
+ "response_format",
"seed",
"stop",
"temperature",
@@ -2358,7 +3400,12 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.75,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "z-ai/glm-4.5-air",
@@ -2394,17 +3441,29 @@
},
"per_request_limits": null,
"supported_parameters": [
+ "frequency_penalty",
"include_reasoning",
+ "logit_bias",
+ "logprobs",
"max_tokens",
+ "presence_penalty",
"reasoning",
"response_format",
"seed",
+ "stop",
"structured_outputs",
"temperature",
"tool_choice",
"tools",
+ "top_k",
+ "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.75,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "qwen/qwen3-235b-a22b-thinking-2507",
@@ -2426,8 +3485,8 @@
"instruct_type": "qwen3"
},
"pricing": {
- "prompt": "0.0000001",
- "completion": "0.00000039",
+ "prompt": "0.00000011",
+ "completion": "0.0000006",
"request": "0",
"image": "0",
"web_search": "0",
@@ -2435,7 +3494,7 @@
},
"top_provider": {
"context_length": 262144,
- "max_completion_tokens": null,
+ "max_completion_tokens": 262144,
"is_moderated": false
},
"per_request_limits": null,
@@ -2459,7 +3518,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "z-ai/glm-4-32b",
@@ -2500,7 +3560,12 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.75,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "qwen/qwen3-coder:free",
@@ -2551,7 +3616,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-coder",
@@ -2582,7 +3648,7 @@
},
"top_provider": {
"context_length": 262144,
- "max_completion_tokens": null,
+ "max_completion_tokens": 262144,
"is_moderated": false
},
"per_request_limits": null,
@@ -2604,7 +3670,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "bytedance/ui-tars-1.5-7b",
@@ -2652,7 +3719,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemini-2.5-flash-lite",
@@ -2704,7 +3772,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-235b-a22b-2507",
@@ -2726,8 +3795,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.0000001",
- "completion": "0.0000001",
+ "prompt": "0.00000008",
+ "completion": "0.00000055",
"request": "0",
"image": "0",
"web_search": "0",
@@ -2757,7 +3826,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "switchpoint/router",
@@ -2801,7 +3871,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "moonshotai/kimi-k2:free",
@@ -2833,26 +3904,16 @@
"top_provider": {
"context_length": 32768,
"max_completion_tokens": null,
- "is_moderated": false
+ "is_moderated": true
},
"per_request_limits": null,
"supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
"max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
"seed",
"stop",
- "temperature",
- "tool_choice",
- "tools",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
+ "temperature"
+ ],
+ "default_parameters": {}
},
{
"id": "moonshotai/kimi-k2",
@@ -2905,7 +3966,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "thudm/glm-4.1v-9b-thinking",
@@ -2944,9 +4006,7 @@
"supported_parameters": [
"frequency_penalty",
"include_reasoning",
- "logit_bias",
"max_tokens",
- "min_p",
"presence_penalty",
"reasoning",
"repetition_penalty",
@@ -2955,7 +4015,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/devstral-medium",
@@ -3002,7 +4063,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/devstral-small",
@@ -3052,7 +4116,10 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "cognitivecomputations/dolphin-mistral-24b-venice-edition:free",
@@ -3097,7 +4164,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "x-ai/grok-4",
@@ -3147,7 +4215,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-3n-e2b-it:free",
@@ -3191,7 +4260,8 @@
"stop",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "tencent/hunyuan-a13b-instruct:free",
@@ -3242,7 +4312,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "tencent/hunyuan-a13b-instruct",
@@ -3295,7 +4366,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "tngtech/deepseek-r1t2-chimera:free",
@@ -3346,7 +4418,60 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "tngtech/deepseek-r1t2-chimera",
+ "canonical_slug": "tngtech/deepseek-r1t2-chimera",
+ "hugging_face_id": "tngtech/DeepSeek-TNG-R1T2-Chimera",
+ "name": "TNG: DeepSeek R1T2 Chimera",
+ "created": 1751986985,
+ "description": "DeepSeek-TNG-R1T2-Chimera is the second-generation Chimera model from TNG Tech. It is a 671 B-parameter mixture-of-experts text-generation model assembled from DeepSeek-AI’s R1-0528, R1, and V3-0324 checkpoints with an Assembly-of-Experts merge. The tri-parent design yields strong reasoning performance while running roughly 20 % faster than the original R1 and more than 2× faster than R1-0528 under vLLM, giving a favorable cost-to-intelligence trade-off. The checkpoint supports contexts up to 60 k tokens in standard use (tested to ~130 k) and maintains consistent token behaviour, making it suitable for long-context analysis, dialogue and other open-ended generation tasks.",
+ "context_length": 163840,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "DeepSeek",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000003",
+ "completion": "0.0000012",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 163840,
+ "max_completion_tokens": 163840,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "include_reasoning",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "reasoning",
+ "repetition_penalty",
+ "seed",
+ "stop",
+ "temperature",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "morph/morph-v3-large",
@@ -3354,7 +4479,7 @@
"hugging_face_id": "",
"name": "Morph: Morph V3 Large",
"created": 1751910858,
- "description": "Morph's high-accuracy apply model for complex code edits. 2000+ tokens/sec with 98% accuracy for precise code transformations.",
+ "description": "Morph's high-accuracy apply model for complex code edits. ~4,500 tokens/sec with 98% accuracy for precise code transformations.\n\nThe model requires the prompt to be in the following format: \n{instruction}\n{initial_code}\n{edit_snippet}\n\nZero Data Retention is enabled for Morph. Learn more about this model in their [documentation](https://docs.morphllm.com/quickstart)",
"context_length": 81920,
"architecture": {
"modality": "text->text",
@@ -3385,7 +4510,12 @@
"max_tokens",
"stop",
"temperature"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "morph/morph-v3-fast",
@@ -3393,7 +4523,7 @@
"hugging_face_id": "",
"name": "Morph: Morph V3 Fast",
"created": 1751910002,
- "description": "Morph's fastest apply model for code edits. 4500+ tokens/sec with 96% accuracy for rapid code transformations.",
+ "description": "Morph's fastest apply model for code edits. ~10,500 tokens/sec with 96% accuracy for rapid code transformations.\n\nThe model requires the prompt to be in the following format: \n{instruction}\n{initial_code}\n{edit_snippet}\n\nZero Data Retention is enabled for Morph. Learn more about this model in their [documentation](https://docs.morphllm.com/quickstart)",
"context_length": 81920,
"architecture": {
"modality": "text->text",
@@ -3407,8 +4537,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.0000009",
- "completion": "0.0000019",
+ "prompt": "0.0000008",
+ "completion": "0.0000012",
"request": "0",
"image": "0",
"web_search": "0",
@@ -3424,7 +4554,12 @@
"max_tokens",
"stop",
"temperature"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "baidu/ernie-4.5-vl-424b-a47b",
@@ -3463,9 +4598,7 @@
"supported_parameters": [
"frequency_penalty",
"include_reasoning",
- "logit_bias",
"max_tokens",
- "min_p",
"presence_penalty",
"reasoning",
"repetition_penalty",
@@ -3474,7 +4607,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "baidu/ernie-4.5-300b-a47b",
@@ -3511,9 +4645,7 @@
"per_request_limits": null,
"supported_parameters": [
"frequency_penalty",
- "logit_bias",
"max_tokens",
- "min_p",
"presence_penalty",
"repetition_penalty",
"response_format",
@@ -3523,7 +4655,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "thedrummer/anubis-70b-v1.1",
@@ -3532,7 +4665,7 @@
"name": "TheDrummer: Anubis 70B V1.1",
"created": 1751208347,
"description": "TheDrummer's Anubis v1.1 is an unaligned, creative Llama 3.3 70B model focused on providing character-driven roleplay & stories. It excels at gritty, visceral prose, unique character adherence, and coherent narratives, while maintaining the instruction following Llama 3.3 70B is known for.",
- "context_length": 16384,
+ "context_length": 131072,
"architecture": {
"modality": "text->text",
"input_modalities": [
@@ -3545,16 +4678,16 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.0000004",
- "completion": "0.0000007",
+ "prompt": "0.00000065",
+ "completion": "0.000001",
"request": "0",
"image": "0",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 16384,
- "max_completion_tokens": null,
+ "context_length": 131072,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
@@ -3565,14 +4698,13 @@
"min_p",
"presence_penalty",
"repetition_penalty",
- "response_format",
"seed",
"stop",
- "structured_outputs",
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "inception/mercury",
@@ -3619,7 +4751,12 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "mistralai/mistral-small-3.2-24b-instruct:free",
@@ -3672,7 +4809,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/mistral-small-3.2-24b-instruct",
@@ -3681,7 +4821,7 @@
"name": "Mistral: Mistral Small 3.2 24B",
"created": 1750443016,
"description": "Mistral-Small-3.2-24B-Instruct-2506 is an updated 24B parameter model from Mistral optimized for instruction following, repetition reduction, and improved function calling. Compared to the 3.1 release, version 3.2 significantly improves accuracy on WildBench and Arena Hard, reduces infinite generations, and delivers gains in tool use and structured output tasks.\n\nIt supports image and text inputs with structured outputs, function/tool calling, and strong performance across coding (HumanEval+, MBPP), STEM (MMLU, MATH, GPQA), and vision benchmarks (ChartQA, DocVQA).",
- "context_length": 128000,
+ "context_length": 131072,
"architecture": {
"modality": "text+image->text",
"input_modalities": [
@@ -3695,16 +4835,16 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.000000075",
- "completion": "0.0000002",
+ "prompt": "0.00000006",
+ "completion": "0.00000018",
"request": "0",
"image": "0",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 128000,
- "max_completion_tokens": null,
+ "context_length": 131072,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
@@ -3726,7 +4866,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "minimax/minimax-m1",
@@ -3748,8 +4891,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.0000003",
- "completion": "0.00000165",
+ "prompt": "0.0000004",
+ "completion": "0.0000022",
"request": "0",
"image": "0",
"web_search": "0",
@@ -3764,9 +4907,7 @@
"supported_parameters": [
"frequency_penalty",
"include_reasoning",
- "logit_bias",
"max_tokens",
- "min_p",
"presence_penalty",
"reasoning",
"repetition_penalty",
@@ -3778,7 +4919,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemini-2.5-flash-lite-preview-06-17",
@@ -3807,6 +4949,7 @@
"completion": "0.0000004",
"request": "0",
"image": "0",
+ "audio": "0.0000003",
"web_search": "0",
"internal_reasoning": "0",
"input_cache_read": "0.000000025",
@@ -3830,7 +4973,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemini-2.5-flash",
@@ -3882,7 +5026,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemini-2.5-pro",
@@ -3934,7 +5079,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "moonshotai/kimi-dev-72b:free",
@@ -3985,7 +5131,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "moonshotai/kimi-dev-72b",
@@ -4029,7 +5176,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/o3-pro",
@@ -4075,7 +5223,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "x-ai/grok-3-mini",
@@ -4125,7 +5274,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "x-ai/grok-3",
@@ -4175,7 +5325,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/magistral-small-2506",
@@ -4224,7 +5375,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/magistral-medium-2506",
@@ -4273,7 +5427,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/magistral-medium-2506:thinking",
@@ -4322,7 +5479,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "google/gemini-2.5-pro-preview",
@@ -4374,7 +5534,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-r1-0528-qwen3-8b:free",
@@ -4425,7 +5586,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-r1-0528-qwen3-8b",
@@ -4434,7 +5596,7 @@
"name": "DeepSeek: Deepseek R1 0528 Qwen3 8B",
"created": 1748538543,
"description": "DeepSeek-R1-0528 is a lightly upgraded release of DeepSeek R1 that taps more compute and smarter post-training tricks, pushing its reasoning and inference to the brink of flagship models like O3 and Gemini 2.5 Pro.\nIt now tops math, programming, and logic leaderboards, showcasing a step-change in depth-of-thought.\nThe distilled variant, DeepSeek-R1-0528-Qwen3-8B, transfers this chain-of-thought into an 8 B-parameter form, beating standard Qwen3 8B by +10 pp and tying the 235 B “thinking” giant on AIME 2024.",
- "context_length": 131072,
+ "context_length": 32768,
"architecture": {
"modality": "text->text",
"input_modalities": [
@@ -4447,16 +5609,16 @@
"instruct_type": "deepseek-r1"
},
"pricing": {
- "prompt": "0.00000001",
- "completion": "0.00000005",
+ "prompt": "0.00000003",
+ "completion": "0.00000011",
"request": "0",
"image": "0",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 131072,
- "max_completion_tokens": null,
+ "context_length": 32768,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -4476,7 +5638,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-r1-0528:free",
@@ -4527,7 +5690,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-r1-0528",
@@ -4558,7 +5722,7 @@
},
"top_provider": {
"context_length": 163840,
- "max_completion_tokens": null,
+ "max_completion_tokens": 163840,
"is_moderated": false
},
"per_request_limits": null,
@@ -4582,7 +5746,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthropic/claude-opus-4",
@@ -4610,7 +5775,7 @@
"completion": "0.000075",
"request": "0",
"image": "0.024",
- "web_search": "0.01",
+ "web_search": "0",
"internal_reasoning": "0",
"input_cache_read": "0.0000015",
"input_cache_write": "0.00001875"
@@ -4618,7 +5783,7 @@
"top_provider": {
"context_length": 200000,
"max_completion_tokens": 32000,
- "is_moderated": true
+ "is_moderated": false
},
"per_request_limits": null,
"supported_parameters": [
@@ -4631,7 +5796,12 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "anthropic/claude-sonnet-4",
@@ -4680,7 +5850,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/devstral-small-2505:free",
@@ -4731,7 +5902,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/devstral-small-2505",
@@ -4753,8 +5927,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000004",
- "completion": "0.00000014",
+ "prompt": "0.00000005",
+ "completion": "0.00000022",
"request": "0",
"image": "0",
"web_search": "0",
@@ -4762,7 +5936,7 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": null,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
@@ -4784,7 +5958,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "google/gemma-3n-e4b-it:free",
@@ -4828,7 +6005,8 @@
"stop",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-3n-e4b-it",
@@ -4874,7 +6052,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/codex-mini",
@@ -4920,7 +6099,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3.3-8b-instruct:free",
@@ -4965,7 +6145,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "nousresearch/deephermes-3-mistral-24b-preview",
@@ -4987,8 +6168,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000013",
- "completion": "0.00000051",
+ "prompt": "0.00000015",
+ "completion": "0.00000059",
"request": "0",
"image": "0",
"web_search": "0",
@@ -4996,7 +6177,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -5016,7 +6197,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-medium-3",
@@ -5064,7 +6246,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "google/gemini-2.5-pro-preview-05-06",
@@ -5116,7 +6301,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "arcee-ai/spotlight",
@@ -5163,7 +6349,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "arcee-ai/maestro-reasoning",
@@ -5209,7 +6396,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "arcee-ai/virtuoso-large",
@@ -5257,7 +6445,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "arcee-ai/coder-large",
@@ -5303,7 +6492,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "microsoft/phi-4-reasoning-plus",
@@ -5352,7 +6542,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "inception/mercury-coder",
@@ -5399,7 +6590,12 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "qwen/qwen3-4b:free",
@@ -5448,7 +6644,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-prover-v2",
@@ -5495,7 +6692,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-guard-4-12b",
@@ -5544,7 +6742,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-30b-a3b:free",
@@ -5595,7 +6794,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-30b-a3b",
@@ -5626,7 +6826,7 @@
},
"top_provider": {
"context_length": 40960,
- "max_completion_tokens": null,
+ "max_completion_tokens": 40960,
"is_moderated": false
},
"per_request_limits": null,
@@ -5650,7 +6850,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-8b:free",
@@ -5701,7 +6902,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-8b",
@@ -5739,9 +6941,7 @@
"supported_parameters": [
"frequency_penalty",
"include_reasoning",
- "logit_bias",
"max_tokens",
- "min_p",
"presence_penalty",
"reasoning",
"repetition_penalty",
@@ -5750,7 +6950,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-14b:free",
@@ -5801,7 +7002,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-14b",
@@ -5823,8 +7025,8 @@
"instruct_type": "qwen3"
},
"pricing": {
- "prompt": "0.00000006",
- "completion": "0.00000024",
+ "prompt": "0.00000005",
+ "completion": "0.00000022",
"request": "0",
"image": "0",
"web_search": "0",
@@ -5856,7 +7058,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-32b",
@@ -5878,8 +7081,8 @@
"instruct_type": "qwen3"
},
"pricing": {
- "prompt": "0.00000003",
- "completion": "0.00000013",
+ "prompt": "0.00000005",
+ "completion": "0.0000002",
"request": "0",
"image": "0",
"web_search": "0",
@@ -5887,7 +7090,7 @@
},
"top_provider": {
"context_length": 40960,
- "max_completion_tokens": null,
+ "max_completion_tokens": 40960,
"is_moderated": false
},
"per_request_limits": null,
@@ -5911,7 +7114,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-235b-a22b:free",
@@ -5966,7 +7170,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen3-235b-a22b",
@@ -6021,7 +7226,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "tngtech/deepseek-r1t-chimera:free",
@@ -6072,7 +7278,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "tngtech/deepseek-r1t-chimera",
@@ -6094,8 +7301,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000024999988",
- "completion": "0.000000999999888",
+ "prompt": "0.0000003",
+ "completion": "0.0000012",
"request": "0",
"image": "0",
"web_search": "0",
@@ -6103,7 +7310,7 @@
},
"top_provider": {
"context_length": 163840,
- "max_completion_tokens": null,
+ "max_completion_tokens": 163840,
"is_moderated": false
},
"per_request_limits": null,
@@ -6123,7 +7330,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "microsoft/mai-ds-r1:free",
@@ -6174,7 +7382,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "microsoft/mai-ds-r1",
@@ -6196,8 +7405,8 @@
"instruct_type": "deepseek-r1"
},
"pricing": {
- "prompt": "0.00000024999988",
- "completion": "0.000000999999888",
+ "prompt": "0.0000003",
+ "completion": "0.0000012",
"request": "0",
"image": "0",
"web_search": "0",
@@ -6205,7 +7414,7 @@
},
"top_provider": {
"context_length": 163840,
- "max_completion_tokens": null,
+ "max_completion_tokens": 163840,
"is_moderated": false
},
"per_request_limits": null,
@@ -6225,7 +7434,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "thudm/glm-z1-32b",
@@ -6247,8 +7457,8 @@
"instruct_type": "deepseek-r1"
},
"pricing": {
- "prompt": "0.00000004",
- "completion": "0.00000014",
+ "prompt": "0.00000005",
+ "completion": "0.00000022",
"request": "0",
"image": "0",
"web_search": "0",
@@ -6256,7 +7466,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -6276,7 +7486,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/o4-mini-high",
@@ -6323,7 +7534,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/o3",
@@ -6370,7 +7582,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/o4-mini",
@@ -6417,7 +7630,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "shisa-ai/shisa-v2-llama3.3-70b:free",
@@ -6466,7 +7680,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "shisa-ai/shisa-v2-llama3.3-70b",
@@ -6488,8 +7703,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000004",
- "completion": "0.00000014",
+ "prompt": "0.00000005",
+ "completion": "0.00000022",
"request": "0",
"image": "0",
"web_search": "0",
@@ -6497,7 +7712,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -6515,7 +7730,58 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "qwen/qwen2.5-coder-7b-instruct",
+ "canonical_slug": "qwen/qwen2.5-coder-7b-instruct",
+ "hugging_face_id": "Qwen/Qwen2.5-Coder-7B-Instruct",
+ "name": "Qwen: Qwen2.5 Coder 7B Instruct",
+ "created": 1744734887,
+ "description": "Qwen2.5-Coder-7B-Instruct is a 7B parameter instruction-tuned language model optimized for code-related tasks such as code generation, reasoning, and bug fixing. Based on the Qwen2.5 architecture, it incorporates enhancements like RoPE, SwiGLU, RMSNorm, and GQA attention with support for up to 128K tokens using YaRN-based extrapolation. It is trained on a large corpus of source code, synthetic data, and text-code grounding, providing robust performance across programming languages and agentic coding workflows.\n\nThis model is part of the Qwen2.5-Coder family and offers strong compatibility with tools like vLLM for efficient deployment. Released under the Apache 2.0 license.",
+ "context_length": 32768,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Qwen",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00000003",
+ "completion": "0.00000009",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 32768,
+ "max_completion_tokens": null,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "presence_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4.1",
@@ -6569,7 +7835,8 @@
"top_logprobs",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4.1-mini",
@@ -6623,7 +7890,8 @@
"top_logprobs",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4.1-nano",
@@ -6676,7 +7944,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "eleutherai/llemma_7b",
@@ -6722,7 +7991,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "alfredpros/codellama-7b-instruct-solidity",
@@ -6768,7 +8038,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "arliai/qwq-32b-arliai-rpr-v1:free",
@@ -6819,7 +8090,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "arliai/qwq-32b-arliai-rpr-v1",
@@ -6841,8 +8113,8 @@
"instruct_type": "deepseek-r1"
},
"pricing": {
- "prompt": "0.00000002",
- "completion": "0.00000007",
+ "prompt": "0.00000003",
+ "completion": "0.00000011",
"request": "0",
"image": "0",
"web_search": "0",
@@ -6850,7 +8122,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -6870,7 +8142,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "agentica-org/deepcoder-14b-preview:free",
@@ -6921,7 +8194,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "agentica-org/deepcoder-14b-preview",
@@ -6972,111 +8246,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "moonshotai/kimi-vl-a3b-thinking:free",
- "canonical_slug": "moonshotai/kimi-vl-a3b-thinking",
- "hugging_face_id": "moonshotai/Kimi-VL-A3B-Thinking",
- "name": "MoonshotAI: Kimi VL A3B Thinking (free)",
- "created": 1744304841,
- "description": "Kimi-VL is a lightweight Mixture-of-Experts vision-language model that activates only 2.8B parameters per step while delivering strong performance on multimodal reasoning and long-context tasks. The Kimi-VL-A3B-Thinking variant, fine-tuned with chain-of-thought and reinforcement learning, excels in math and visual reasoning benchmarks like MathVision, MMMU, and MathVista, rivaling much larger models such as Qwen2.5-VL-7B and Gemma-3-12B. It supports 128K context and high-resolution input via its MoonViT encoder.",
- "context_length": 131072,
- "architecture": {
- "modality": "text+image->text",
- "input_modalities": [
- "image",
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Other",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0",
- "completion": "0",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 131072,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "include_reasoning",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "reasoning",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
- },
- {
- "id": "moonshotai/kimi-vl-a3b-thinking",
- "canonical_slug": "moonshotai/kimi-vl-a3b-thinking",
- "hugging_face_id": "moonshotai/Kimi-VL-A3B-Thinking",
- "name": "MoonshotAI: Kimi VL A3B Thinking",
- "created": 1744304841,
- "description": "Kimi-VL is a lightweight Mixture-of-Experts vision-language model that activates only 2.8B parameters per step while delivering strong performance on multimodal reasoning and long-context tasks. The Kimi-VL-A3B-Thinking variant, fine-tuned with chain-of-thought and reinforcement learning, excels in math and visual reasoning benchmarks like MathVision, MMMU, and MathVista, rivaling much larger models such as Qwen2.5-VL-7B and Gemma-3-12B. It supports 128K context and high-resolution input via its MoonViT encoder.",
- "context_length": 131072,
- "architecture": {
- "modality": "text+image->text",
- "input_modalities": [
- "image",
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Other",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.00000002",
- "completion": "0.00000007",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 131072,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "include_reasoning",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "reasoning",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "x-ai/grok-3-mini-beta",
@@ -7125,7 +8296,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "x-ai/grok-3-beta",
@@ -7174,7 +8346,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "nvidia/llama-3.1-nemotron-ultra-253b-v1",
@@ -7223,7 +8396,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-4-maverick:free",
@@ -7269,7 +8443,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-4-maverick",
@@ -7323,7 +8498,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-4-scout:free",
@@ -7369,7 +8545,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-4-scout",
@@ -7378,7 +8555,7 @@
"name": "Meta: Llama 4 Scout",
"created": 1743881519,
"description": "Llama 4 Scout 17B Instruct (16E) is a mixture-of-experts (MoE) language model developed by Meta, activating 17 billion parameters out of a total of 109B. It supports native multimodal input (text and image) and multilingual output (text and code) across 12 supported languages. Designed for assistant-style interaction and visual reasoning, Scout uses 16 experts per forward pass and features a context length of 10 million tokens, with a training corpus of ~40 trillion tokens.\n\nBuilt for high efficiency and local or commercial deployment, Llama 4 Scout incorporates early fusion for seamless modality integration. It is instruction-tuned for use in multilingual chat, captioning, and image understanding tasks. Released under the Llama 4 Community License, it was last trained on data up to August 2024 and launched publicly on April 5, 2025.",
- "context_length": 1048576,
+ "context_length": 327680,
"architecture": {
"modality": "text+image->text",
"input_modalities": [
@@ -7395,13 +8572,13 @@
"prompt": "0.00000008",
"completion": "0.0000003",
"request": "0",
- "image": "0",
+ "image": "0.0003342",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 1048576,
- "max_completion_tokens": 1048576,
+ "context_length": 327680,
+ "max_completion_tokens": 16384,
"is_moderated": false
},
"per_request_limits": null,
@@ -7423,7 +8600,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "allenai/molmo-7b-d",
@@ -7471,7 +8649,12 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "qwen/qwen2.5-vl-32b-instruct:free",
@@ -7480,57 +8663,6 @@
"name": "Qwen: Qwen2.5 VL 32B Instruct (free)",
"created": 1742839838,
"description": "Qwen2.5-VL-32B is a multimodal vision-language model fine-tuned through reinforcement learning for enhanced mathematical reasoning, structured outputs, and visual problem-solving capabilities. It excels at visual analysis tasks, including object recognition, textual interpretation within images, and precise event localization in extended videos. Qwen2.5-VL-32B demonstrates state-of-the-art performance across multimodal benchmarks such as MMMU, MathVista, and VideoMME, while maintaining strong reasoning and clarity in text-based tasks like MMLU, mathematical problem-solving, and code generation.",
- "context_length": 8192,
- "architecture": {
- "modality": "text+image->text",
- "input_modalities": [
- "text",
- "image"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Qwen",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0",
- "completion": "0",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 8192,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "response_format",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
- },
- {
- "id": "qwen/qwen2.5-vl-32b-instruct",
- "canonical_slug": "qwen/qwen2.5-vl-32b-instruct",
- "hugging_face_id": "Qwen/Qwen2.5-VL-32B-Instruct",
- "name": "Qwen: Qwen2.5 VL 32B Instruct",
- "created": 1742839838,
- "description": "Qwen2.5-VL-32B is a multimodal vision-language model fine-tuned through reinforcement learning for enhanced mathematical reasoning, structured outputs, and visual problem-solving capabilities. It excels at visual analysis tasks, including object recognition, textual interpretation within images, and precise event localization in extended videos. Qwen2.5-VL-32B demonstrates state-of-the-art performance across multimodal benchmarks such as MMMU, MathVista, and VideoMME, while maintaining strong reasoning and clarity in text-based tasks like MMLU, mathematical problem-solving, and code generation.",
"context_length": 16384,
"architecture": {
"modality": "text+image->text",
@@ -7545,8 +8677,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000004",
- "completion": "0.00000014",
+ "prompt": "0",
+ "completion": "0",
"request": "0",
"image": "0",
"web_search": "0",
@@ -7558,6 +8690,58 @@
"is_moderated": false
},
"per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "temperature",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "qwen/qwen2.5-vl-32b-instruct",
+ "canonical_slug": "qwen/qwen2.5-vl-32b-instruct",
+ "hugging_face_id": "Qwen/Qwen2.5-VL-32B-Instruct",
+ "name": "Qwen: Qwen2.5 VL 32B Instruct",
+ "created": 1742839838,
+ "description": "Qwen2.5-VL-32B is a multimodal vision-language model fine-tuned through reinforcement learning for enhanced mathematical reasoning, structured outputs, and visual problem-solving capabilities. It excels at visual analysis tasks, including object recognition, textual interpretation within images, and precise event localization in extended videos. Qwen2.5-VL-32B demonstrates state-of-the-art performance across multimodal benchmarks such as MMMU, MathVista, and VideoMME, while maintaining strong reasoning and clarity in text-based tasks like MMLU, mathematical problem-solving, and code generation.",
+ "context_length": 16384,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "text",
+ "image"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Qwen",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00000005",
+ "completion": "0.00000022",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 16384,
+ "max_completion_tokens": 16384,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
"supported_parameters": [
"frequency_penalty",
"logit_bias",
@@ -7574,7 +8758,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-chat-v3-0324:free",
@@ -7625,7 +8810,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-chat-v3-0324",
@@ -7647,8 +8833,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000024999988",
- "completion": "0.000000999999888",
+ "prompt": "0.00000024",
+ "completion": "0.00000084",
"request": "0",
"image": "0",
"web_search": "0",
@@ -7656,7 +8842,7 @@
},
"top_provider": {
"context_length": 163840,
- "max_completion_tokens": null,
+ "max_completion_tokens": 163840,
"is_moderated": false
},
"per_request_limits": null,
@@ -7678,7 +8864,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/o1-pro",
@@ -7722,7 +8909,8 @@
"response_format",
"seed",
"structured_outputs"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-small-3.1-24b-instruct:free",
@@ -7776,7 +8964,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/mistral-small-3.1-24b-instruct",
@@ -7785,7 +8976,7 @@
"name": "Mistral: Mistral Small 3.1 24B",
"created": 1742238937,
"description": "Mistral Small 3.1 24B Instruct is an upgraded variant of Mistral Small 3 (2501), featuring 24 billion parameters with advanced multimodal capabilities. It provides state-of-the-art performance in text-based reasoning and vision tasks, including image analysis, programming, mathematical reasoning, and multilingual support across dozens of languages. Equipped with an extensive 128k token context window and optimized for efficient local inference, it supports use cases such as conversational agents, function calling, long-document comprehension, and privacy-sensitive deployments. The updated version is [Mistral Small 3.2](mistralai/mistral-small-3.2-24b-instruct)",
- "context_length": 131072,
+ "context_length": 128000,
"architecture": {
"modality": "text+image->text",
"input_modalities": [
@@ -7799,16 +8990,16 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000004",
- "completion": "0.00000015",
+ "prompt": "0.00000005",
+ "completion": "0.0000001",
"request": "0",
"image": "0",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 131072,
- "max_completion_tokens": 96000,
+ "context_length": 128000,
+ "max_completion_tokens": null,
"is_moderated": false
},
"per_request_limits": null,
@@ -7830,7 +9021,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "allenai/olmo-2-0325-32b-instruct",
@@ -7852,8 +9046,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.000001",
- "completion": "0.0000015",
+ "prompt": "0.0000002",
+ "completion": "0.00000035",
"request": "0",
"image": "0",
"web_search": "0",
@@ -7877,7 +9071,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-3-4b-it:free",
@@ -7920,7 +9115,8 @@
"structured_outputs",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-3-4b-it",
@@ -7929,7 +9125,7 @@
"name": "Google: Gemma 3 4B",
"created": 1741905510,
"description": "Gemma 3 introduces multimodality, supporting vision-language input and text outputs. It handles context windows up to 128k tokens, understands over 140 languages, and offers improved math, reasoning, and chat capabilities, including structured outputs and function calling.",
- "context_length": 131072,
+ "context_length": 96000,
"architecture": {
"modality": "text+image->text",
"input_modalities": [
@@ -7943,21 +9139,23 @@
"instruct_type": "gemma"
},
"pricing": {
- "prompt": "0.00000004",
- "completion": "0.00000008",
+ "prompt": "0.00000001703012",
+ "completion": "0.0000000681536",
"request": "0",
"image": "0",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 131072,
+ "context_length": 96000,
"max_completion_tokens": null,
"is_moderated": false
},
"per_request_limits": null,
"supported_parameters": [
"frequency_penalty",
+ "logit_bias",
+ "logprobs",
"max_tokens",
"min_p",
"presence_penalty",
@@ -7967,8 +9165,10 @@
"stop",
"temperature",
"top_k",
+ "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-3-12b-it:free",
@@ -8005,20 +9205,12 @@
},
"per_request_limits": null,
"supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
"max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
"seed",
- "stop",
"temperature",
- "top_k",
- "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-3-12b-it",
@@ -8027,7 +9219,7 @@
"name": "Google: Gemma 3 12B",
"created": 1741902625,
"description": "Gemma 3 introduces multimodality, supporting vision-language input and text outputs. It handles context windows up to 128k tokens, understands over 140 languages, and offers improved math, reasoning, and chat capabilities, including structured outputs and function calling. Gemma 3 12B is the second largest in the family of Gemma 3 models after [Gemma 3 27B](google/gemma-3-27b-it)",
- "context_length": 96000,
+ "context_length": 131072,
"architecture": {
"modality": "text+image->text",
"input_modalities": [
@@ -8041,16 +9233,16 @@
"instruct_type": "gemma"
},
"pricing": {
- "prompt": "0.00000004",
- "completion": "0.00000014",
+ "prompt": "0.00000003",
+ "completion": "0.0000001",
"request": "0",
"image": "0",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 96000,
- "max_completion_tokens": 8192,
+ "context_length": 131072,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
@@ -8065,11 +9257,13 @@
"response_format",
"seed",
"stop",
+ "structured_outputs",
"temperature",
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "cohere/command-a",
@@ -8115,7 +9309,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4o-mini-search-preview",
@@ -8155,7 +9350,8 @@
"response_format",
"structured_outputs",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4o-search-preview",
@@ -8195,7 +9391,8 @@
"response_format",
"structured_outputs",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-3-27b-it:free",
@@ -8247,7 +9444,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-3-27b-it",
@@ -8256,7 +9454,7 @@
"name": "Google: Gemma 3 27B",
"created": 1741756359,
"description": "Gemma 3 introduces multimodality, supporting vision-language input and text outputs. It handles context windows up to 128k tokens, understands over 140 languages, and offers improved math, reasoning, and chat capabilities, including structured outputs and function calling. Gemma 3 27B is Google's latest open source model, successor to [Gemma 2](google/gemma-2-27b-it)",
- "context_length": 96000,
+ "context_length": 131072,
"architecture": {
"modality": "text+image->text",
"input_modalities": [
@@ -8270,16 +9468,16 @@
"instruct_type": "gemma"
},
"pricing": {
- "prompt": "0.00000007",
- "completion": "0.00000026",
+ "prompt": "0.00000009",
+ "completion": "0.00000016",
"request": "0",
- "image": "0",
+ "image": "0.0000256",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 96000,
- "max_completion_tokens": 8192,
+ "context_length": 131072,
+ "max_completion_tokens": 16384,
"is_moderated": false
},
"per_request_limits": null,
@@ -8299,54 +9497,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "thedrummer/anubis-pro-105b-v1",
- "canonical_slug": "thedrummer/anubis-pro-105b-v1",
- "hugging_face_id": "TheDrummer/Anubis-Pro-105B-v1",
- "name": "TheDrummer: Anubis Pro 105B V1",
- "created": 1741642290,
- "description": "Anubis Pro 105B v1 is an expanded and refined variant of Meta’s Llama 3.3 70B, featuring 50% additional layers and further fine-tuning to leverage its increased capacity. Designed for advanced narrative, roleplay, and instructional tasks, it demonstrates enhanced emotional intelligence, creativity, nuanced character portrayal, and superior prompt adherence compared to smaller models. Its larger parameter count allows for deeper contextual understanding and extended reasoning capabilities, optimized for engaging, intelligent, and coherent interactions.",
- "context_length": 131072,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Other",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.0000005",
- "completion": "0.000001",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 131072,
- "max_completion_tokens": 131072,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "thedrummer/skyfall-36b-v2",
@@ -8368,8 +9520,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000004",
- "completion": "0.00000016",
+ "prompt": "0.00000008",
+ "completion": "0.00000033",
"request": "0",
"image": "0",
"web_search": "0",
@@ -8377,7 +9529,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -8395,7 +9547,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "microsoft/phi-4-multimodal-instruct",
@@ -8446,7 +9599,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "perplexity/sonar-reasoning-pro",
@@ -8492,7 +9646,8 @@
"top_k",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "perplexity/sonar-pro",
@@ -8536,7 +9691,8 @@
"top_k",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "perplexity/sonar-deep-research",
@@ -8581,52 +9737,8 @@
"top_k",
"top_p",
"web_search_options"
- ]
- },
- {
- "id": "qwen/qwq-32b:free",
- "canonical_slug": "qwen/qwq-32b",
- "hugging_face_id": "Qwen/QwQ-32B",
- "name": "Qwen: QwQ 32B (free)",
- "created": 1741208814,
- "description": "QwQ is the reasoning model of the Qwen series. Compared with conventional instruction-tuned models, QwQ, which is capable of thinking and reasoning, can achieve significantly enhanced performance in downstream tasks, especially hard problems. QwQ-32B is the medium-sized reasoning model, which is capable of achieving competitive performance against state-of-the-art reasoning models, e.g., DeepSeek-R1, o1-mini.",
- "context_length": 32768,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Qwen",
- "instruct_type": "qwq"
- },
- "pricing": {
- "prompt": "0",
- "completion": "0",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 32768,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "stop",
- "structured_outputs",
- "temperature",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwq-32b",
@@ -8681,7 +9793,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "nousresearch/deephermes-3-llama-3-8b-preview:free",
@@ -8730,7 +9843,58 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "nousresearch/deephermes-3-llama-3-8b-preview",
+ "canonical_slug": "nousresearch/deephermes-3-llama-3-8b-preview",
+ "hugging_face_id": "NousResearch/DeepHermes-3-Llama-3-8B-Preview",
+ "name": "Nous: DeepHermes 3 Llama 3 8B Preview",
+ "created": 1740719372,
+ "description": "DeepHermes 3 Preview is the latest version of our flagship Hermes series of LLMs by Nous Research, and one of the first models in the world to unify Reasoning (long chains of thought that improve answer accuracy) and normal LLM response modes into one model. We have also improved LLM annotation, judgement, and function calling.\n\nDeepHermes 3 Preview is one of the first LLM models to unify both \"intuitive\", traditional mode responses and long chain of thought reasoning responses into a single model, toggled by a system prompt.",
+ "context_length": 131072,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Other",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00000003",
+ "completion": "0.00000011",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 131072,
+ "max_completion_tokens": 131072,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "seed",
+ "stop",
+ "temperature",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemini-2.0-flash-lite-001",
@@ -8778,7 +9942,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthropic/claude-3.7-sonnet",
@@ -8827,7 +9992,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthropic/claude-3.7-sonnet:thinking",
@@ -8874,7 +10040,8 @@
"temperature",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "perplexity/r1-1776",
@@ -8918,7 +10085,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-saba",
@@ -8965,109 +10133,10 @@
"tool_choice",
"tools",
"top_p"
- ]
- },
- {
- "id": "cognitivecomputations/dolphin3.0-r1-mistral-24b:free",
- "canonical_slug": "cognitivecomputations/dolphin3.0-r1-mistral-24b",
- "hugging_face_id": "cognitivecomputations/Dolphin3.0-R1-Mistral-24B",
- "name": "Dolphin3.0 R1 Mistral 24B (free)",
- "created": 1739462498,
- "description": "Dolphin 3.0 R1 is the next generation of the Dolphin series of instruct-tuned models. Designed to be the ultimate general purpose local model, enabling coding, math, agentic, function calling, and general use cases.\n\nThe R1 version has been trained for 3 epochs to reason using 800k reasoning traces from the Dolphin-R1 dataset.\n\nDolphin aims to be a general purpose reasoning instruct model, similar to the models behind ChatGPT, Claude, Gemini.\n\nPart of the [Dolphin 3.0 Collection](https://huggingface.co/collections/cognitivecomputations/dolphin-30-677ab47f73d7ff66743979a3) Curated and trained by [Eric Hartford](https://huggingface.co/ehartford), [Ben Gitter](https://huggingface.co/bigstorm), [BlouseJury](https://huggingface.co/BlouseJury) and [Cognitive Computations](https://huggingface.co/cognitivecomputations)",
- "context_length": 32768,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Other",
- "instruct_type": "deepseek-r1"
- },
- "pricing": {
- "prompt": "0",
- "completion": "0",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 32768,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "include_reasoning",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "reasoning",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
- },
- {
- "id": "cognitivecomputations/dolphin3.0-r1-mistral-24b",
- "canonical_slug": "cognitivecomputations/dolphin3.0-r1-mistral-24b",
- "hugging_face_id": "cognitivecomputations/Dolphin3.0-R1-Mistral-24B",
- "name": "Dolphin3.0 R1 Mistral 24B",
- "created": 1739462498,
- "description": "Dolphin 3.0 R1 is the next generation of the Dolphin series of instruct-tuned models. Designed to be the ultimate general purpose local model, enabling coding, math, agentic, function calling, and general use cases.\n\nThe R1 version has been trained for 3 epochs to reason using 800k reasoning traces from the Dolphin-R1 dataset.\n\nDolphin aims to be a general purpose reasoning instruct model, similar to the models behind ChatGPT, Claude, Gemini.\n\nPart of the [Dolphin 3.0 Collection](https://huggingface.co/collections/cognitivecomputations/dolphin-30-677ab47f73d7ff66743979a3) Curated and trained by [Eric Hartford](https://huggingface.co/ehartford), [Ben Gitter](https://huggingface.co/bigstorm), [BlouseJury](https://huggingface.co/BlouseJury) and [Cognitive Computations](https://huggingface.co/cognitivecomputations)",
- "context_length": 32768,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Other",
- "instruct_type": "deepseek-r1"
- },
- "pricing": {
- "prompt": "0.00000001",
- "completion": "0.00000003",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 32768,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "include_reasoning",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "reasoning",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "cognitivecomputations/dolphin3.0-mistral-24b:free",
@@ -9116,7 +10185,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "cognitivecomputations/dolphin3.0-mistral-24b",
@@ -9138,8 +10208,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000003",
- "completion": "0.00000011",
+ "prompt": "0.00000004",
+ "completion": "0.00000017",
"request": "0",
"image": "0",
"web_search": "0",
@@ -9147,7 +10217,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -9165,7 +10235,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-guard-3-8b",
@@ -9215,7 +10286,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/o3-mini-high",
@@ -9259,56 +10331,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
- },
- {
- "id": "deepseek/deepseek-r1-distill-llama-8b",
- "canonical_slug": "deepseek/deepseek-r1-distill-llama-8b",
- "hugging_face_id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
- "name": "DeepSeek: R1 Distill Llama 8B",
- "created": 1738937718,
- "description": "DeepSeek R1 Distill Llama 8B is a distilled large language model based on [Llama-3.1-8B-Instruct](/meta-llama/llama-3.1-8b-instruct), using outputs from [DeepSeek R1](/deepseek/deepseek-r1). The model combines advanced distillation techniques to achieve high performance across multiple benchmarks, including:\n\n- AIME 2024 pass@1: 50.4\n- MATH-500 pass@1: 89.1\n- CodeForces Rating: 1205\n\nThe model leverages fine-tuning from DeepSeek R1's outputs, enabling competitive performance comparable to larger frontier models.\n\nHugging Face: \n- [Llama-3.1-8B](https://huggingface.co/meta-llama/Llama-3.1-8B) \n- [DeepSeek-R1-Distill-Llama-8B](https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Llama-8B) |",
- "context_length": 32000,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Llama3",
- "instruct_type": "deepseek-r1"
- },
- "pricing": {
- "prompt": "0.00000004",
- "completion": "0.00000004",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 32000,
- "max_completion_tokens": 32000,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "include_reasoning",
- "logit_bias",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "reasoning",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemini-2.0-flash-001",
@@ -9359,7 +10383,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-vl-plus",
@@ -9402,7 +10427,8 @@
"seed",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "aion-labs/aion-1.0",
@@ -9443,7 +10469,8 @@
"reasoning",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "aion-labs/aion-1.0-mini",
@@ -9484,7 +10511,8 @@
"reasoning",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "aion-labs/aion-rp-llama-3.1-8b",
@@ -9523,7 +10551,8 @@
"max_tokens",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-vl-max",
@@ -9566,7 +10595,8 @@
"seed",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-turbo",
@@ -9611,7 +10641,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen2.5-vl-72b-instruct:free",
@@ -9620,7 +10651,7 @@
"name": "Qwen: Qwen2.5 VL 72B Instruct (free)",
"created": 1738410311,
"description": "Qwen2.5-VL is proficient in recognizing common objects such as flowers, birds, fish, and insects. It is also highly capable of analyzing texts, charts, icons, graphics, and layouts within images.",
- "context_length": 32768,
+ "context_length": 131072,
"architecture": {
"modality": "text+image->text",
"input_modalities": [
@@ -9642,23 +10673,20 @@
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 32768,
- "max_completion_tokens": null,
+ "context_length": 131072,
+ "max_completion_tokens": 2048,
"is_moderated": false
},
"per_request_limits": null,
"supported_parameters": [
- "frequency_penalty",
"max_tokens",
"presence_penalty",
"response_format",
"seed",
- "stop",
- "structured_outputs",
"temperature",
- "top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen2.5-vl-72b-instruct",
@@ -9681,8 +10709,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000007",
- "completion": "0.00000028",
+ "prompt": "0.00000008",
+ "completion": "0.00000033",
"request": "0",
"image": "0",
"web_search": "0",
@@ -9690,7 +10718,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -9708,7 +10736,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-plus",
@@ -9753,7 +10782,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-max",
@@ -9798,7 +10828,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/o3-mini",
@@ -9842,7 +10873,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-small-24b-instruct-2501:free",
@@ -9891,7 +10923,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/mistral-small-24b-instruct-2501",
@@ -9913,8 +10948,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000004",
- "completion": "0.00000015",
+ "prompt": "0.00000005",
+ "completion": "0.00000008",
"request": "0",
"image": "0",
"web_search": "0",
@@ -9922,7 +10957,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 16384,
"is_moderated": false
},
"per_request_limits": null,
@@ -9944,7 +10979,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "deepseek/deepseek-r1-distill-qwen-32b",
@@ -9982,7 +11020,6 @@
"supported_parameters": [
"frequency_penalty",
"include_reasoning",
- "logit_bias",
"max_tokens",
"min_p",
"presence_penalty",
@@ -9991,10 +11028,12 @@
"response_format",
"seed",
"stop",
+ "structured_outputs",
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-r1-distill-qwen-14b",
@@ -10043,7 +11082,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "perplexity/sonar-reasoning",
@@ -10088,7 +11128,8 @@
"top_k",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "perplexity/sonar",
@@ -10132,7 +11173,8 @@
"top_k",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "liquid/lfm-7b",
@@ -10169,20 +11211,17 @@
"per_request_limits": null,
"supported_parameters": [
"frequency_penalty",
- "logit_bias",
- "logprobs",
"max_tokens",
"min_p",
"presence_penalty",
"repetition_penalty",
- "response_format",
"seed",
"stop",
"temperature",
"top_k",
- "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "liquid/lfm-3b",
@@ -10228,7 +11267,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-r1-distill-llama-70b:free",
@@ -10279,7 +11319,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-r1-distill-llama-70b",
@@ -10310,7 +11351,7 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": null,
+ "max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,
@@ -10328,12 +11369,11 @@
"seed",
"stop",
"temperature",
- "tool_choice",
- "tools",
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-r1:free",
@@ -10373,7 +11413,8 @@
"max_tokens",
"reasoning",
"temperature"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-r1",
@@ -10428,7 +11469,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "minimax/minimax-01",
@@ -10468,7 +11510,8 @@
"max_tokens",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/codestral-2501",
@@ -10515,7 +11558,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "microsoft/phi-4",
@@ -10563,7 +11609,56 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "sao10k/l3.1-70b-hanami-x1",
+ "canonical_slug": "sao10k/l3.1-70b-hanami-x1",
+ "hugging_face_id": "Sao10K/L3.1-70B-Hanami-x1",
+ "name": "Sao10K: Llama 3.1 70B Hanami x1",
+ "created": 1736302854,
+ "description": "This is [Sao10K](/sao10k)'s experiment over [Euryale v2.2](/sao10k/l3.1-euryale-70b).",
+ "context_length": 16000,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Llama3",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.000003",
+ "completion": "0.000003",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 16000,
+ "max_completion_tokens": null,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "seed",
+ "stop",
+ "temperature",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "deepseek/deepseek-chat",
@@ -10585,8 +11680,8 @@
"instruct_type": null
},
"pricing": {
- "prompt": "0.00000024999988",
- "completion": "0.000000999999888",
+ "prompt": "0.0000003",
+ "completion": "0.00000085",
"request": "0",
"image": "0",
"web_search": "0",
@@ -10594,7 +11689,7 @@
},
"top_provider": {
"context_length": 163840,
- "max_completion_tokens": null,
+ "max_completion_tokens": 163840,
"is_moderated": false
},
"per_request_limits": null,
@@ -10616,7 +11711,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "sao10k/l3.3-euryale-70b",
@@ -10664,7 +11760,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/o1",
@@ -10709,7 +11806,8 @@
"structured_outputs",
"tool_choice",
"tools"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "cohere/command-r7b-12-2024",
@@ -10755,7 +11853,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemini-2.0-flash-exp:free",
@@ -10800,7 +11899,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3.3-70b-instruct:free",
@@ -10848,7 +11948,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3.3-70b-instruct",
@@ -10870,8 +11971,8 @@
"instruct_type": "llama3"
},
"pricing": {
- "prompt": "0.000000012",
- "completion": "0.000000036",
+ "prompt": "0.00000013",
+ "completion": "0.00000039",
"request": "0",
"image": "0",
"web_search": "0",
@@ -10879,7 +11980,7 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": 131072,
+ "max_completion_tokens": 120000,
"is_moderated": false
},
"per_request_limits": null,
@@ -10901,7 +12002,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "amazon/nova-lite-v1",
@@ -10944,7 +12046,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "amazon/nova-micro-v1",
@@ -10986,7 +12089,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "amazon/nova-pro-v1",
@@ -11029,56 +12133,8 @@
"tools",
"top_k",
"top_p"
- ]
- },
- {
- "id": "qwen/qwq-32b-preview",
- "canonical_slug": "qwen/qwq-32b-preview",
- "hugging_face_id": "Qwen/QwQ-32B-Preview",
- "name": "Qwen: QwQ 32B Preview",
- "created": 1732754541,
- "description": "QwQ-32B-Preview is an experimental research model focused on AI reasoning capabilities developed by the Qwen Team. As a preview release, it demonstrates promising analytical abilities while having several important limitations:\n\n1. **Language Mixing and Code-Switching**: The model may mix languages or switch between them unexpectedly, affecting response clarity.\n2. **Recursive Reasoning Loops**: The model may enter circular reasoning patterns, leading to lengthy responses without a conclusive answer.\n3. **Safety and Ethical Considerations**: The model requires enhanced safety measures to ensure reliable and secure performance, and users should exercise caution when deploying it.\n4. **Performance and Benchmark Limitations**: The model excels in math and coding but has room for improvement in other areas, such as common sense reasoning and nuanced language understanding.\n\n",
- "context_length": 32768,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Qwen",
- "instruct_type": "deepseek-r1"
- },
- "pricing": {
- "prompt": "0.0000002",
- "completion": "0.0000002",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 32768,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4o-2024-11-20",
@@ -11132,7 +12188,8 @@
"top_logprobs",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-large-2411",
@@ -11179,7 +12236,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/mistral-large-2407",
@@ -11226,7 +12286,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/pixtral-large-2411",
@@ -11274,7 +12337,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "qwen/qwen-2.5-coder-32b-instruct:free",
@@ -11323,7 +12389,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-2.5-coder-32b-instruct",
@@ -11345,8 +12412,8 @@
"instruct_type": "chatml"
},
"pricing": {
- "prompt": "0.00000006",
- "completion": "0.00000015",
+ "prompt": "0.00000004",
+ "completion": "0.00000016",
"request": "0",
"image": "0",
"web_search": "0",
@@ -11354,7 +12421,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": 16384,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -11373,7 +12440,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "raifle/sorcererlm-8x22b",
@@ -11420,7 +12488,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "thedrummer/unslopnemo-12b",
@@ -11466,7 +12535,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthropic/claude-3.5-haiku",
@@ -11512,7 +12582,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthropic/claude-3.5-haiku-20241022",
@@ -11559,55 +12630,8 @@
"tools",
"top_k",
"top_p"
- ]
- },
- {
- "id": "anthracite-org/magnum-v4-72b",
- "canonical_slug": "anthracite-org/magnum-v4-72b",
- "hugging_face_id": "anthracite-org/magnum-v4-72b",
- "name": "Magnum v4 72B",
- "created": 1729555200,
- "description": "This is a series of models designed to replicate the prose quality of the Claude 3 models, specifically Sonnet(https://openrouter.ai/anthropic/claude-3.5-sonnet) and Opus(https://openrouter.ai/anthropic/claude-3-opus).\n\nThe model is fine-tuned on top of [Qwen2.5 72B](https://openrouter.ai/qwen/qwen-2.5-72b-instruct).",
- "context_length": 16384,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Qwen",
- "instruct_type": "chatml"
- },
- "pricing": {
- "prompt": "0.000002",
- "completion": "0.000005",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 16384,
- "max_completion_tokens": 2048,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_a",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthropic/claude-3.5-sonnet",
@@ -11654,7 +12678,61 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "anthracite-org/magnum-v4-72b",
+ "canonical_slug": "anthracite-org/magnum-v4-72b",
+ "hugging_face_id": "anthracite-org/magnum-v4-72b",
+ "name": "Magnum v4 72B",
+ "created": 1729555200,
+ "description": "This is a series of models designed to replicate the prose quality of the Claude 3 models, specifically Sonnet(https://openrouter.ai/anthropic/claude-3.5-sonnet) and Opus(https://openrouter.ai/anthropic/claude-3-opus).\n\nThe model is fine-tuned on top of [Qwen2.5 72B](https://openrouter.ai/qwen/qwen-2.5-72b-instruct).",
+ "context_length": 16384,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Qwen",
+ "instruct_type": "chatml"
+ },
+ "pricing": {
+ "prompt": "0.0000025",
+ "completion": "0.000005",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 16384,
+ "max_completion_tokens": 2048,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "top_a",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/ministral-8b",
@@ -11701,7 +12779,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/ministral-3b",
@@ -11746,13 +12827,16 @@
"structured_outputs",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "qwen/qwen-2.5-7b-instruct",
"canonical_slug": "qwen/qwen-2.5-7b-instruct",
"hugging_face_id": "Qwen/Qwen2.5-7B-Instruct",
- "name": "Qwen2.5 7B Instruct",
+ "name": "Qwen: Qwen2.5 7B Instruct",
"created": 1729036800,
"description": "Qwen2.5 7B is the latest series of Qwen large language models. Qwen2.5 brings the following improvements upon Qwen2:\n\n- Significantly more knowledge and has greatly improved capabilities in coding and mathematics, thanks to our specialized expert models in these domains.\n\n- Significant improvements in instruction following, generating long texts (over 8K tokens), understanding structured data (e.g, tables), and generating structured outputs especially JSON. More resilient to the diversity of system prompts, enhancing role-play implementation and condition-setting for chatbots.\n\n- Long-context Support up to 128K tokens and can generate up to 8K tokens.\n\n- Multilingual support for over 29 languages, including Chinese, English, French, Spanish, Portuguese, German, Italian, Russian, Japanese, Korean, Vietnamese, Thai, Arabic, and more.\n\nUsage of this model is subject to [Tongyi Qianwen LICENSE AGREEMENT](https://huggingface.co/Qwen/Qwen1.5-110B-Chat/blob/main/LICENSE).",
"context_length": 65536,
@@ -11795,7 +12879,12 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": null,
+ "top_p": null,
+ "frequency_penalty": null
+ }
},
{
"id": "nvidia/llama-3.1-nemotron-70b-instruct",
@@ -11845,7 +12934,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "inflection/inflection-3-productivity",
@@ -11885,7 +12975,8 @@
"stop",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "inflection/inflection-3-pi",
@@ -11925,57 +13016,8 @@
"stop",
"temperature",
"top_p"
- ]
- },
- {
- "id": "google/gemini-flash-1.5-8b",
- "canonical_slug": "google/gemini-flash-1.5-8b",
- "hugging_face_id": null,
- "name": "Google: Gemini 1.5 Flash 8B",
- "created": 1727913600,
- "description": "Gemini Flash 1.5 8B is optimized for speed and efficiency, offering enhanced performance in small prompt tasks like chat, transcription, and translation. With reduced latency, it is highly effective for real-time and large-scale operations. This model focuses on cost-effective solutions while maintaining high-quality results.\n\n[Click here to learn more about this model](https://developers.googleblog.com/en/gemini-15-flash-8b-is-now-generally-available-for-use/).\n\nUsage of Gemini is subject to Google's [Gemini Terms of Use](https://ai.google.dev/terms).",
- "context_length": 1000000,
- "architecture": {
- "modality": "text+image->text",
- "input_modalities": [
- "text",
- "image"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Gemini",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.0000000375",
- "completion": "0.00000015",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0",
- "input_cache_read": "0.00000001",
- "input_cache_write": "0.0000000583"
- },
- "top_provider": {
- "context_length": 1000000,
- "max_completion_tokens": 8192,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tool_choice",
- "tools",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "thedrummer/rocinante-12b",
@@ -12026,7 +13068,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthracite-org/magnum-v2-72b",
@@ -12073,7 +13116,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3.2-3b-instruct:free",
@@ -12116,7 +13160,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3.2-3b-instruct",
@@ -12169,7 +13214,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3.2-1b-instruct",
@@ -12219,55 +13265,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "meta-llama/llama-3.2-90b-vision-instruct",
- "canonical_slug": "meta-llama/llama-3.2-90b-vision-instruct",
- "hugging_face_id": "meta-llama/Llama-3.2-90B-Vision-Instruct",
- "name": "Meta: Llama 3.2 90B Vision Instruct",
- "created": 1727222400,
- "description": "The Llama 90B Vision model is a top-tier, 90-billion-parameter multimodal model designed for the most challenging visual reasoning and language tasks. It offers unparalleled accuracy in image captioning, visual question answering, and advanced image-text comprehension. Pre-trained on vast multimodal datasets and fine-tuned with human feedback, the Llama 90B Vision is engineered to handle the most demanding image-based AI tasks.\n\nThis model is perfect for industries requiring cutting-edge multimodal AI capabilities, particularly those dealing with complex, real-time visual and textual analysis.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD_VISION.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).",
- "context_length": 32768,
- "architecture": {
- "modality": "text+image->text",
- "input_modalities": [
- "text",
- "image"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Llama3",
- "instruct_type": "llama3"
- },
- "pricing": {
- "prompt": "0.00000035",
- "completion": "0.0000004",
- "request": "0",
- "image": "0.0005058",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 32768,
- "max_completion_tokens": 16384,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "response_format",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3.2-11b-vision-instruct",
@@ -12318,7 +13317,57 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "meta-llama/llama-3.2-90b-vision-instruct",
+ "canonical_slug": "meta-llama/llama-3.2-90b-vision-instruct",
+ "hugging_face_id": "meta-llama/Llama-3.2-90B-Vision-Instruct",
+ "name": "Meta: Llama 3.2 90B Vision Instruct",
+ "created": 1727222400,
+ "description": "The Llama 90B Vision model is a top-tier, 90-billion-parameter multimodal model designed for the most challenging visual reasoning and language tasks. It offers unparalleled accuracy in image captioning, visual question answering, and advanced image-text comprehension. Pre-trained on vast multimodal datasets and fine-tuned with human feedback, the Llama 90B Vision is engineered to handle the most demanding image-based AI tasks.\n\nThis model is perfect for industries requiring cutting-edge multimodal AI capabilities, particularly those dealing with complex, real-time visual and textual analysis.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD_VISION.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).",
+ "context_length": 32768,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "text",
+ "image"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Llama3",
+ "instruct_type": "llama3"
+ },
+ "pricing": {
+ "prompt": "0.00000035",
+ "completion": "0.0000004",
+ "request": "0",
+ "image": "0.0005058",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 32768,
+ "max_completion_tokens": 16384,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "temperature",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-2.5-72b-instruct:free",
@@ -12367,7 +13416,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-2.5-72b-instruct",
@@ -12398,7 +13448,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -12419,7 +13469,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "neversleep/llama-3.1-lumimaid-8b",
@@ -12457,6 +13508,7 @@
"supported_parameters": [
"frequency_penalty",
"logit_bias",
+ "logprobs",
"max_tokens",
"min_p",
"presence_penalty",
@@ -12468,8 +13520,10 @@
"temperature",
"top_a",
"top_k",
+ "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/o1-mini",
@@ -12508,7 +13562,8 @@
"supported_parameters": [
"max_tokens",
"seed"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/o1-mini-2024-09-12",
@@ -12547,7 +13602,8 @@
"supported_parameters": [
"max_tokens",
"seed"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/pixtral-12b",
@@ -12601,54 +13657,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "cohere/command-r-plus-08-2024",
- "canonical_slug": "cohere/command-r-plus-08-2024",
- "hugging_face_id": null,
- "name": "Cohere: Command R+ (08-2024)",
- "created": 1724976000,
- "description": "command-r-plus-08-2024 is an update of the [Command R+](/models/cohere/command-r-plus) with roughly 50% higher throughput and 25% lower latencies as compared to the previous Command R+ version, while keeping the hardware footprint the same.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).",
- "context_length": 128000,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Cohere",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.0000025",
- "completion": "0.00001",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 128000,
- "max_completion_tokens": 4000,
- "is_moderated": true
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tools",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "cohere/command-r-08-2024",
@@ -12695,7 +13707,56 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "cohere/command-r-plus-08-2024",
+ "canonical_slug": "cohere/command-r-plus-08-2024",
+ "hugging_face_id": null,
+ "name": "Cohere: Command R+ (08-2024)",
+ "created": 1724976000,
+ "description": "command-r-plus-08-2024 is an update of the [Command R+](/models/cohere/command-r-plus) with roughly 50% higher throughput and 25% lower latencies as compared to the previous Command R+ version, while keeping the hardware footprint the same.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).",
+ "context_length": 128000,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Cohere",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000025",
+ "completion": "0.00001",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 128000,
+ "max_completion_tokens": 4000,
+ "is_moderated": true
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "max_tokens",
+ "presence_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tools",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "qwen/qwen-2.5-vl-7b-instruct",
@@ -12747,7 +13808,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "sao10k/l3.1-euryale-70b",
@@ -12784,7 +13846,6 @@
"per_request_limits": null,
"supported_parameters": [
"frequency_penalty",
- "logit_bias",
"max_tokens",
"min_p",
"presence_penalty",
@@ -12796,7 +13857,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "microsoft/phi-3.5-mini-128k-instruct",
@@ -12837,7 +13899,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "nousresearch/hermes-3-llama-3.1-70b",
@@ -12846,7 +13909,7 @@
"name": "Nous: Hermes 3 70B Instruct",
"created": 1723939200,
"description": "Hermes 3 is a generalist language model with many improvements over [Hermes 2](/models/nousresearch/nous-hermes-2-mistral-7b-dpo), including advanced agentic capabilities, much better roleplaying, reasoning, multi-turn conversation, long context coherence, and improvements across the board.\n\nHermes 3 70B is a competitive, if not superior finetune of the [Llama-3.1 70B foundation model](/models/meta-llama/llama-3.1-70b-instruct), focused on aligning LLMs to the user, with powerful steering capabilities and control given to the end user.\n\nThe Hermes 3 series builds and expands on the Hermes 2 set of capabilities, including more powerful and reliable function calling and structured output capabilities, generalist assistant capabilities, and improved code generation skills.",
- "context_length": 131072,
+ "context_length": 65000,
"architecture": {
"modality": "text->text",
"input_modalities": [
@@ -12859,7 +13922,7 @@
"instruct_type": "chatml"
},
"pricing": {
- "prompt": "0.00000012",
+ "prompt": "0.0000003",
"completion": "0.0000003",
"request": "0",
"image": "0",
@@ -12867,8 +13930,8 @@
"internal_reasoning": "0"
},
"top_provider": {
- "context_length": 131072,
- "max_completion_tokens": 131072,
+ "context_length": 65000,
+ "max_completion_tokens": null,
"is_moderated": false
},
"per_request_limits": null,
@@ -12890,7 +13953,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "nousresearch/hermes-3-llama-3.1-405b",
@@ -12912,8 +13976,8 @@
"instruct_type": "chatml"
},
"pricing": {
- "prompt": "0.0000008",
- "completion": "0.0000008",
+ "prompt": "0.000001",
+ "completion": "0.000001",
"request": "0",
"image": "0",
"web_search": "0",
@@ -12921,7 +13985,7 @@
},
"top_provider": {
"context_length": 131072,
- "max_completion_tokens": 131072,
+ "max_completion_tokens": 16384,
"is_moderated": false
},
"per_request_limits": null,
@@ -12940,7 +14004,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/chatgpt-4o-latest",
@@ -12989,7 +14054,8 @@
"temperature",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "sao10k/l3-lunaris-8b",
@@ -13026,7 +14092,6 @@
"per_request_limits": null,
"supported_parameters": [
"frequency_penalty",
- "logit_bias",
"max_tokens",
"min_p",
"presence_penalty",
@@ -13037,7 +14102,8 @@
"temperature",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4o-2024-08-06",
@@ -13091,7 +14157,8 @@
"top_logprobs",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3.1-405b",
@@ -13113,8 +14180,8 @@
"instruct_type": "none"
},
"pricing": {
- "prompt": "0.000002",
- "completion": "0.000002",
+ "prompt": "0.000004",
+ "completion": "0.000004",
"request": "0",
"image": "0",
"web_search": "0",
@@ -13122,7 +14189,7 @@
},
"top_provider": {
"context_length": 32768,
- "max_completion_tokens": null,
+ "max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@@ -13140,105 +14207,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "meta-llama/llama-3.1-8b-instruct",
- "canonical_slug": "meta-llama/llama-3.1-8b-instruct",
- "hugging_face_id": "meta-llama/Meta-Llama-3.1-8B-Instruct",
- "name": "Meta: Llama 3.1 8B Instruct",
- "created": 1721692800,
- "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 8B instruct-tuned version is fast and efficient.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).",
- "context_length": 16384,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Llama3",
- "instruct_type": "llama3"
- },
- "pricing": {
- "prompt": "0.00000002",
- "completion": "0.00000003",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 16384,
- "max_completion_tokens": 16384,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tool_choice",
- "tools",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
- },
- {
- "id": "meta-llama/llama-3.1-405b-instruct:free",
- "canonical_slug": "meta-llama/llama-3.1-405b-instruct",
- "hugging_face_id": "meta-llama/Meta-Llama-3.1-405B-Instruct",
- "name": "Meta: Llama 3.1 405B Instruct (free)",
- "created": 1721692800,
- "description": "The highly anticipated 400B class of Llama3 is here! Clocking in at 128k context with impressive eval scores, the Meta AI team continues to push the frontier of open-source LLMs.\n\nMeta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 405B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models including GPT-4o and Claude 3.5 Sonnet in evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).",
- "context_length": 65536,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Llama3",
- "instruct_type": "llama3"
- },
- "pricing": {
- "prompt": "0",
- "completion": "0",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 65536,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "stop",
- "structured_outputs",
- "temperature",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3.1-405b-instruct",
@@ -13291,15 +14261,16 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
- "id": "meta-llama/llama-3.1-70b-instruct",
- "canonical_slug": "meta-llama/llama-3.1-70b-instruct",
- "hugging_face_id": "meta-llama/Meta-Llama-3.1-70B-Instruct",
- "name": "Meta: Llama 3.1 70B Instruct",
+ "id": "meta-llama/llama-3.1-8b-instruct",
+ "canonical_slug": "meta-llama/llama-3.1-8b-instruct",
+ "hugging_face_id": "meta-llama/Meta-Llama-3.1-8B-Instruct",
+ "name": "Meta: Llama 3.1 8B Instruct",
"created": 1721692800,
- "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 70B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).",
+ "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 8B instruct-tuned version is fast and efficient.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).",
"context_length": 131072,
"architecture": {
"modality": "text->text",
@@ -13313,8 +14284,8 @@
"instruct_type": "llama3"
},
"pricing": {
- "prompt": "0.0000001",
- "completion": "0.00000028",
+ "prompt": "0.00000002",
+ "completion": "0.00000003",
"request": "0",
"image": "0",
"web_search": "0",
@@ -13344,7 +14315,62 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "meta-llama/llama-3.1-70b-instruct",
+ "canonical_slug": "meta-llama/llama-3.1-70b-instruct",
+ "hugging_face_id": "meta-llama/Meta-Llama-3.1-70B-Instruct",
+ "name": "Meta: Llama 3.1 70B Instruct",
+ "created": 1721692800,
+ "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 70B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).",
+ "context_length": 131072,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Llama3",
+ "instruct_type": "llama3"
+ },
+ "pricing": {
+ "prompt": "0.0000004",
+ "completion": "0.0000004",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 131072,
+ "max_completion_tokens": null,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-nemo:free",
@@ -13393,7 +14419,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/mistral-nemo",
@@ -13446,61 +14475,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "openai/gpt-4o-mini",
- "canonical_slug": "openai/gpt-4o-mini",
- "hugging_face_id": null,
- "name": "OpenAI: GPT-4o-mini",
- "created": 1721260800,
- "description": "GPT-4o mini is OpenAI's newest model after [GPT-4 Omni](/models/openai/gpt-4o), supporting both text and image inputs with text outputs.\n\nAs their most advanced small model, it is many multiples more affordable than other recent frontier models, and more than 60% cheaper than [GPT-3.5 Turbo](/models/openai/gpt-3.5-turbo). It maintains SOTA intelligence, while being significantly more cost-effective.\n\nGPT-4o mini achieves an 82% score on MMLU and presently ranks higher than GPT-4 on chat preferences [common leaderboards](https://arena.lmsys.org/).\n\nCheck out the [launch announcement](https://openai.com/index/gpt-4o-mini-advancing-cost-efficient-intelligence/) to learn more.\n\n#multimodal",
- "context_length": 128000,
- "architecture": {
- "modality": "text+image->text",
- "input_modalities": [
- "text",
- "image",
- "file"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "GPT",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.00000015",
- "completion": "0.0000006",
- "request": "0",
- "image": "0.000217",
- "web_search": "0",
- "internal_reasoning": "0",
- "input_cache_read": "0.000000075"
- },
- "top_provider": {
- "context_length": 128000,
- "max_completion_tokens": 16384,
- "is_moderated": true
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tool_choice",
- "tools",
- "top_logprobs",
- "top_p",
- "web_search_options"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "openai/gpt-4o-mini-2024-07-18",
@@ -13554,7 +14532,63 @@
"top_logprobs",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "openai/gpt-4o-mini",
+ "canonical_slug": "openai/gpt-4o-mini",
+ "hugging_face_id": null,
+ "name": "OpenAI: GPT-4o-mini",
+ "created": 1721260800,
+ "description": "GPT-4o mini is OpenAI's newest model after [GPT-4 Omni](/models/openai/gpt-4o), supporting both text and image inputs with text outputs.\n\nAs their most advanced small model, it is many multiples more affordable than other recent frontier models, and more than 60% cheaper than [GPT-3.5 Turbo](/models/openai/gpt-3.5-turbo). It maintains SOTA intelligence, while being significantly more cost-effective.\n\nGPT-4o mini achieves an 82% score on MMLU and presently ranks higher than GPT-4 on chat preferences [common leaderboards](https://arena.lmsys.org/).\n\nCheck out the [launch announcement](https://openai.com/index/gpt-4o-mini-advancing-cost-efficient-intelligence/) to learn more.\n\n#multimodal",
+ "context_length": 128000,
+ "architecture": {
+ "modality": "text+image->text",
+ "input_modalities": [
+ "text",
+ "image",
+ "file"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "GPT",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00000015",
+ "completion": "0.0000006",
+ "request": "0",
+ "image": "0.000217",
+ "web_search": "0",
+ "internal_reasoning": "0",
+ "input_cache_read": "0.000000075"
+ },
+ "top_provider": {
+ "context_length": 128000,
+ "max_completion_tokens": 16384,
+ "is_moderated": true
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "presence_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_logprobs",
+ "top_p",
+ "web_search_options"
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-2-27b-it",
@@ -13598,7 +14632,8 @@
"structured_outputs",
"temperature",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-2-9b-it:free",
@@ -13647,7 +14682,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "google/gemma-2-9b-it",
@@ -13670,7 +14706,7 @@
},
"pricing": {
"prompt": "0.00000001",
- "completion": "0.00000002",
+ "completion": "0.00000003",
"request": "0",
"image": "0",
"web_search": "0",
@@ -13693,11 +14729,13 @@
"response_format",
"seed",
"stop",
+ "structured_outputs",
"temperature",
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthropic/claude-3.5-sonnet-20240620",
@@ -13744,7 +14782,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "sao10k/l3-euryale-70b",
@@ -13781,9 +14820,7 @@
"per_request_limits": null,
"supported_parameters": [
"frequency_penalty",
- "logit_bias",
"max_tokens",
- "min_p",
"presence_penalty",
"repetition_penalty",
"seed",
@@ -13791,157 +14828,8 @@
"temperature",
"top_k",
"top_p"
- ]
- },
- {
- "id": "nousresearch/hermes-2-pro-llama-3-8b",
- "canonical_slug": "nousresearch/hermes-2-pro-llama-3-8b",
- "hugging_face_id": "NousResearch/Hermes-2-Pro-Llama-3-8B",
- "name": "NousResearch: Hermes 2 Pro - Llama-3 8B",
- "created": 1716768000,
- "description": "Hermes 2 Pro is an upgraded, retrained version of Nous Hermes 2, consisting of an updated and cleaned version of the OpenHermes 2.5 Dataset, as well as a newly introduced Function Calling and JSON Mode dataset developed in-house.",
- "context_length": 131072,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Llama3",
- "instruct_type": "chatml"
- },
- "pricing": {
- "prompt": "0.000000025",
- "completion": "0.00000004",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 131072,
- "max_completion_tokens": 131072,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "top_k",
- "top_logprobs",
- "top_p"
- ]
- },
- {
- "id": "mistralai/mistral-7b-instruct:free",
- "canonical_slug": "mistralai/mistral-7b-instruct",
- "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.3",
- "name": "Mistral: Mistral 7B Instruct (free)",
- "created": 1716768000,
- "description": "A high-performing, industry-standard 7.3B parameter model, with optimizations for speed and context length.\n\n*Mistral 7B Instruct has multiple version variants, and this is intended to be the latest version.*",
- "context_length": 32768,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Mistral",
- "instruct_type": "mistral"
- },
- "pricing": {
- "prompt": "0",
- "completion": "0",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 32768,
- "max_completion_tokens": 16384,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "response_format",
- "seed",
- "stop",
- "temperature",
- "tool_choice",
- "tools",
- "top_k",
- "top_p"
- ]
- },
- {
- "id": "mistralai/mistral-7b-instruct",
- "canonical_slug": "mistralai/mistral-7b-instruct",
- "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.3",
- "name": "Mistral: Mistral 7B Instruct",
- "created": 1716768000,
- "description": "A high-performing, industry-standard 7.3B parameter model, with optimizations for speed and context length.\n\n*Mistral 7B Instruct has multiple version variants, and this is intended to be the latest version.*",
- "context_length": 32768,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Mistral",
- "instruct_type": "mistral"
- },
- "pricing": {
- "prompt": "0.000000028",
- "completion": "0.000000054",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 32768,
- "max_completion_tokens": 16384,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "response_format",
- "seed",
- "stop",
- "temperature",
- "tool_choice",
- "tools",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-7b-instruct-v0.3",
@@ -13991,7 +14879,163 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
+ },
+ {
+ "id": "nousresearch/hermes-2-pro-llama-3-8b",
+ "canonical_slug": "nousresearch/hermes-2-pro-llama-3-8b",
+ "hugging_face_id": "NousResearch/Hermes-2-Pro-Llama-3-8B",
+ "name": "NousResearch: Hermes 2 Pro - Llama-3 8B",
+ "created": 1716768000,
+ "description": "Hermes 2 Pro is an upgraded, retrained version of Nous Hermes 2, consisting of an updated and cleaned version of the OpenHermes 2.5 Dataset, as well as a newly introduced Function Calling and JSON Mode dataset developed in-house.",
+ "context_length": 32768,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Llama3",
+ "instruct_type": "chatml"
+ },
+ "pricing": {
+ "prompt": "0.000000025",
+ "completion": "0.00000008",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 32768,
+ "max_completion_tokens": null,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "max_tokens",
+ "presence_penalty",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "mistralai/mistral-7b-instruct:free",
+ "canonical_slug": "mistralai/mistral-7b-instruct",
+ "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.3",
+ "name": "Mistral: Mistral 7B Instruct (free)",
+ "created": 1716768000,
+ "description": "A high-performing, industry-standard 7.3B parameter model, with optimizations for speed and context length.\n\n*Mistral 7B Instruct has multiple version variants, and this is intended to be the latest version.*",
+ "context_length": 32768,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Mistral",
+ "instruct_type": "mistral"
+ },
+ "pricing": {
+ "prompt": "0",
+ "completion": "0",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 32768,
+ "max_completion_tokens": 16384,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
+ },
+ {
+ "id": "mistralai/mistral-7b-instruct",
+ "canonical_slug": "mistralai/mistral-7b-instruct",
+ "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.3",
+ "name": "Mistral: Mistral 7B Instruct",
+ "created": 1716768000,
+ "description": "A high-performing, industry-standard 7.3B parameter model, with optimizations for speed and context length.\n\n*Mistral 7B Instruct has multiple version variants, and this is intended to be the latest version.*",
+ "context_length": 32768,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Mistral",
+ "instruct_type": "mistral"
+ },
+ "pricing": {
+ "prompt": "0.000000028",
+ "completion": "0.000000054",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 32768,
+ "max_completion_tokens": 16384,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "microsoft/phi-3-mini-128k-instruct",
@@ -14032,7 +15076,8 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "microsoft/phi-3-medium-128k-instruct",
@@ -14073,103 +15118,8 @@
"tool_choice",
"tools",
"top_p"
- ]
- },
- {
- "id": "neversleep/llama-3-lumimaid-70b",
- "canonical_slug": "neversleep/llama-3-lumimaid-70b",
- "hugging_face_id": "NeverSleep/Llama-3-Lumimaid-70B-v0.1",
- "name": "NeverSleep: Llama 3 Lumimaid 70B",
- "created": 1715817600,
- "description": "The NeverSleep team is back, with a Llama 3 70B finetune trained on their curated roleplay data. Striking a balance between eRP and RP, Lumimaid was designed to be serious, yet uncensored when necessary.\n\nTo enhance it's overall intelligence and chat capability, roughly 40% of the training data was not roleplay. This provides a breadth of knowledge to access, while still keeping roleplay as the primary strength.\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).",
- "context_length": 8192,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Llama3",
- "instruct_type": "llama3"
- },
- "pricing": {
- "prompt": "0.000004",
- "completion": "0.000006",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 8192,
- "max_completion_tokens": 4096,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "seed",
- "stop",
- "temperature",
- "top_k",
- "top_p"
- ]
- },
- {
- "id": "google/gemini-flash-1.5",
- "canonical_slug": "google/gemini-flash-1.5",
- "hugging_face_id": null,
- "name": "Google: Gemini 1.5 Flash ",
- "created": 1715644800,
- "description": "Gemini 1.5 Flash is a foundation model that performs well at a variety of multimodal tasks such as visual understanding, classification, summarization, and creating content from image, audio and video. It's adept at processing visual and text inputs such as photographs, documents, infographics, and screenshots.\n\nGemini 1.5 Flash is designed for high-volume, high-frequency tasks where cost and latency matter. On most common tasks, Flash achieves comparable quality to other Gemini Pro models at a significantly reduced cost. Flash is well-suited for applications like chat assistants and on-demand content generation where speed and scale matter.\n\nUsage of Gemini is subject to Google's [Gemini Terms of Use](https://ai.google.dev/terms).\n\n#multimodal",
- "context_length": 1000000,
- "architecture": {
- "modality": "text+image->text",
- "input_modalities": [
- "text",
- "image"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Gemini",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.000000075",
- "completion": "0.0000003",
- "request": "0",
- "image": "0.00004",
- "web_search": "0",
- "internal_reasoning": "0",
- "input_cache_read": "0.00000001875",
- "input_cache_write": "0.0000001583"
- },
- "top_provider": {
- "context_length": 1000000,
- "max_completion_tokens": 8192,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tool_choice",
- "tools",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4o",
@@ -14223,7 +15173,8 @@
"top_logprobs",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4o:extended",
@@ -14276,53 +15227,8 @@
"top_logprobs",
"top_p",
"web_search_options"
- ]
- },
- {
- "id": "meta-llama/llama-guard-2-8b",
- "canonical_slug": "meta-llama/llama-guard-2-8b",
- "hugging_face_id": "meta-llama/Meta-Llama-Guard-2-8B",
- "name": "Meta: LlamaGuard 2 8B",
- "created": 1715558400,
- "description": "This safeguard model has 8B parameters and is based on the Llama 3 family. Just like is predecessor, [LlamaGuard 1](https://huggingface.co/meta-llama/LlamaGuard-7b), it can do both prompt and response classification.\n\nLlamaGuard 2 acts as a normal LLM would, generating text that indicates whether the given input/output is safe/unsafe. If deemed unsafe, it will also share the content categories violated.\n\nFor best results, please use raw prompt input or the `/completions` endpoint, instead of the chat API.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).",
- "context_length": 8192,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Llama3",
- "instruct_type": "none"
- },
- "pricing": {
- "prompt": "0.0000002",
- "completion": "0.0000002",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 8192,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "max_tokens",
- "min_p",
- "presence_penalty",
- "repetition_penalty",
- "stop",
- "temperature",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4o-2024-05-13",
@@ -14375,7 +15281,55 @@
"top_logprobs",
"top_p",
"web_search_options"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "meta-llama/llama-guard-2-8b",
+ "canonical_slug": "meta-llama/llama-guard-2-8b",
+ "hugging_face_id": "meta-llama/Meta-Llama-Guard-2-8B",
+ "name": "Meta: LlamaGuard 2 8B",
+ "created": 1715558400,
+ "description": "This safeguard model has 8B parameters and is based on the Llama 3 family. Just like is predecessor, [LlamaGuard 1](https://huggingface.co/meta-llama/LlamaGuard-7b), it can do both prompt and response classification.\n\nLlamaGuard 2 acts as a normal LLM would, generating text that indicates whether the given input/output is safe/unsafe. If deemed unsafe, it will also share the content categories violated.\n\nFor best results, please use raw prompt input or the `/completions` endpoint, instead of the chat API.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).",
+ "context_length": 8192,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Llama3",
+ "instruct_type": "none"
+ },
+ "pricing": {
+ "prompt": "0.0000002",
+ "completion": "0.0000002",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 8192,
+ "max_completion_tokens": null,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "stop",
+ "temperature",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3-8b-instruct",
@@ -14425,7 +15379,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "meta-llama/llama-3-70b-instruct",
@@ -14477,7 +15432,8 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mixtral-8x22b-instruct",
@@ -14529,7 +15485,10 @@
"top_k",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "microsoft/wizardlm-2-8x22b",
@@ -14560,13 +15519,12 @@
},
"top_provider": {
"context_length": 65536,
- "max_completion_tokens": 65536,
+ "max_completion_tokens": 16384,
"is_moderated": false
},
"per_request_limits": null,
"supported_parameters": [
"frequency_penalty",
- "logit_bias",
"max_tokens",
"min_p",
"presence_penalty",
@@ -14577,55 +15535,8 @@
"temperature",
"top_k",
"top_p"
- ]
- },
- {
- "id": "google/gemini-pro-1.5",
- "canonical_slug": "google/gemini-pro-1.5",
- "hugging_face_id": null,
- "name": "Google: Gemini 1.5 Pro",
- "created": 1712620800,
- "description": "Google's latest multimodal model, supports image and video[0] in text or chat prompts.\n\nOptimized for language tasks including:\n\n- Code generation\n- Text generation\n- Text editing\n- Problem solving\n- Recommendations\n- Information extraction\n- Data extraction or generation\n- AI agents\n\nUsage of Gemini is subject to Google's [Gemini Terms of Use](https://ai.google.dev/terms).\n\n* [0]: Video input is not available through OpenRouter at this time.",
- "context_length": 2000000,
- "architecture": {
- "modality": "text+image->text",
- "input_modalities": [
- "text",
- "image"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Gemini",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.00000125",
- "completion": "0.000005",
- "request": "0",
- "image": "0.0006575",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 2000000,
- "max_completion_tokens": 8192,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tool_choice",
- "tools",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4-turbo",
@@ -14676,194 +15587,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "cohere/command-r-plus",
- "canonical_slug": "cohere/command-r-plus",
- "hugging_face_id": null,
- "name": "Cohere: Command R+",
- "created": 1712188800,
- "description": "Command R+ is a new, 104B-parameter LLM from Cohere. It's useful for roleplay, general consumer usecases, and Retrieval Augmented Generation (RAG).\n\nIt offers multilingual support for ten key languages to facilitate global business operations. See benchmarks and the launch post [here](https://txt.cohere.com/command-r-plus-microsoft-azure/).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).",
- "context_length": 128000,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Cohere",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.000003",
- "completion": "0.000015",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 128000,
- "max_completion_tokens": 4000,
- "is_moderated": true
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tools",
- "top_k",
- "top_p"
- ]
- },
- {
- "id": "cohere/command-r-plus-04-2024",
- "canonical_slug": "cohere/command-r-plus-04-2024",
- "hugging_face_id": null,
- "name": "Cohere: Command R+ (04-2024)",
- "created": 1712016000,
- "description": "Command R+ is a new, 104B-parameter LLM from Cohere. It's useful for roleplay, general consumer usecases, and Retrieval Augmented Generation (RAG).\n\nIt offers multilingual support for ten key languages to facilitate global business operations. See benchmarks and the launch post [here](https://txt.cohere.com/command-r-plus-microsoft-azure/).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).",
- "context_length": 128000,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Cohere",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.000003",
- "completion": "0.000015",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 128000,
- "max_completion_tokens": 4000,
- "is_moderated": true
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tools",
- "top_k",
- "top_p"
- ]
- },
- {
- "id": "cohere/command",
- "canonical_slug": "cohere/command",
- "hugging_face_id": null,
- "name": "Cohere: Command",
- "created": 1710374400,
- "description": "Command is an instruction-following conversational model that performs language tasks with high quality, more reliably and with a longer context than our base generative models.\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).",
- "context_length": 4096,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Cohere",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.000001",
- "completion": "0.000002",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 4096,
- "max_completion_tokens": 4000,
- "is_moderated": true
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "top_k",
- "top_p"
- ]
- },
- {
- "id": "cohere/command-r",
- "canonical_slug": "cohere/command-r",
- "hugging_face_id": null,
- "name": "Cohere: Command R",
- "created": 1710374400,
- "description": "Command-R is a 35B parameter model that performs conversational language tasks at a higher quality, more reliably, and with a longer context than previous models. It can be used for complex workflows like code generation, retrieval augmented generation (RAG), tool use, and agents.\n\nRead the launch post [here](https://txt.cohere.com/command-r/).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).",
- "context_length": 128000,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Cohere",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.0000005",
- "completion": "0.0000015",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 128000,
- "max_completion_tokens": 4000,
- "is_moderated": true
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tools",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthropic/claude-3-haiku",
@@ -14909,7 +15634,8 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "anthropic/claude-3-opus",
@@ -14955,54 +15681,8 @@
"tools",
"top_k",
"top_p"
- ]
- },
- {
- "id": "cohere/command-r-03-2024",
- "canonical_slug": "cohere/command-r-03-2024",
- "hugging_face_id": null,
- "name": "Cohere: Command R (03-2024)",
- "created": 1709341200,
- "description": "Command-R is a 35B parameter model that performs conversational language tasks at a higher quality, more reliably, and with a longer context than previous models. It can be used for complex workflows like code generation, retrieval augmented generation (RAG), tool use, and agents.\n\nRead the launch post [here](https://txt.cohere.com/command-r/).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).",
- "context_length": 128000,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Cohere",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.0000005",
- "completion": "0.0000015",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 128000,
- "max_completion_tokens": 4000,
- "is_moderated": true
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tools",
- "top_k",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-large",
@@ -15049,7 +15729,10 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "openai/gpt-3.5-turbo-0613",
@@ -15099,7 +15782,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4-turbo-preview",
@@ -15149,54 +15833,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "mistralai/mistral-small",
- "canonical_slug": "mistralai/mistral-small",
- "hugging_face_id": null,
- "name": "Mistral Small",
- "created": 1704844800,
- "description": "With 22 billion parameters, Mistral Small v24.09 offers a convenient mid-point between (Mistral NeMo 12B)[/mistralai/mistral-nemo] and (Mistral Large 2)[/mistralai/mistral-large], providing a cost-effective solution that can be deployed across various platforms and environments. It has better reasoning, exhibits more capabilities, can produce and reason about code, and is multiligual, supporting English, French, German, Italian, and Spanish.",
- "context_length": 32768,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "Mistral",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.0000002",
- "completion": "0.0000006",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 32768,
- "max_completion_tokens": null,
- "is_moderated": false
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tool_choice",
- "tools",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-tiny",
@@ -15243,7 +15881,109 @@
"tool_choice",
"tools",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
+ },
+ {
+ "id": "mistralai/mistral-small",
+ "canonical_slug": "mistralai/mistral-small",
+ "hugging_face_id": null,
+ "name": "Mistral Small",
+ "created": 1704844800,
+ "description": "With 22 billion parameters, Mistral Small v24.09 offers a convenient mid-point between (Mistral NeMo 12B)[/mistralai/mistral-nemo] and (Mistral Large 2)[/mistralai/mistral-large], providing a cost-effective solution that can be deployed across various platforms and environments. It has better reasoning, exhibits more capabilities, can produce and reason about code, and is multiligual, supporting English, French, German, Italian, and Spanish.",
+ "context_length": 32768,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Mistral",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.0000002",
+ "completion": "0.0000006",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 32768,
+ "max_completion_tokens": null,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "max_tokens",
+ "presence_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
+ },
+ {
+ "id": "mistralai/mistral-7b-instruct-v0.2",
+ "canonical_slug": "mistralai/mistral-7b-instruct-v0.2",
+ "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.2",
+ "name": "Mistral: Mistral 7B Instruct v0.2",
+ "created": 1703721600,
+ "description": "A high-performing, industry-standard 7.3B parameter model, with optimizations for speed and context length.\n\nAn improved version of [Mistral 7B Instruct](/modelsmistralai/mistral-7b-instruct-v0.1), with the following changes:\n\n- 32k context window (vs 8k context in v0.1)\n- Rope-theta = 1e6\n- No Sliding-Window Attention",
+ "context_length": 32768,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "Mistral",
+ "instruct_type": "mistral"
+ },
+ "pricing": {
+ "prompt": "0.0000002",
+ "completion": "0.0000002",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 32768,
+ "max_completion_tokens": null,
+ "is_moderated": false
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "max_tokens",
+ "min_p",
+ "presence_penalty",
+ "repetition_penalty",
+ "stop",
+ "temperature",
+ "top_k",
+ "top_p"
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "mistralai/mixtral-8x7b-instruct",
@@ -15265,8 +16005,8 @@
"instruct_type": "mistral"
},
"pricing": {
- "prompt": "0.0000004",
- "completion": "0.0000004",
+ "prompt": "0.00000054",
+ "completion": "0.00000054",
"request": "0",
"image": "0",
"web_search": "0",
@@ -15293,7 +16033,10 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
},
{
"id": "neversleep/noromaid-20b",
@@ -15331,6 +16074,7 @@
"supported_parameters": [
"frequency_penalty",
"logit_bias",
+ "logprobs",
"max_tokens",
"min_p",
"presence_penalty",
@@ -15342,8 +16086,10 @@
"temperature",
"top_a",
"top_k",
+ "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "alpindale/goliath-120b",
@@ -15381,6 +16127,7 @@
"supported_parameters": [
"frequency_penalty",
"logit_bias",
+ "logprobs",
"max_tokens",
"min_p",
"presence_penalty",
@@ -15392,8 +16139,10 @@
"temperature",
"top_a",
"top_k",
+ "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openrouter/auto",
@@ -15401,7 +16150,7 @@
"hugging_face_id": null,
"name": "Auto Router",
"created": 1699401600,
- "description": "Your prompt will be processed by a meta-model and routed to one of dozens of models (see below), optimizing for the best possible output.\n\nTo see which model was used, visit [Activity](/activity), or read the `model` attribute of the response. Your response will be priced at the same rate as the routed model.\n\nThe meta-model is powered by [Not Diamond](https://docs.notdiamond.ai/docs/how-not-diamond-works). Learn more in our [docs](/docs/model-routing).\n\nRequests will be routed to the following models:\n- [openai/gpt-4o-2024-08-06](/openai/gpt-4o-2024-08-06)\n- [openai/gpt-4o-2024-05-13](/openai/gpt-4o-2024-05-13)\n- [openai/gpt-4o-mini-2024-07-18](/openai/gpt-4o-mini-2024-07-18)\n- [openai/chatgpt-4o-latest](/openai/chatgpt-4o-latest)\n- [openai/o1-preview-2024-09-12](/openai/o1-preview-2024-09-12)\n- [openai/o1-mini-2024-09-12](/openai/o1-mini-2024-09-12)\n- [anthropic/claude-3.5-sonnet](/anthropic/claude-3.5-sonnet)\n- [anthropic/claude-3.5-haiku](/anthropic/claude-3.5-haiku)\n- [anthropic/claude-3-opus](/anthropic/claude-3-opus)\n- [anthropic/claude-2.1](/anthropic/claude-2.1)\n- [google/gemini-pro-1.5](/google/gemini-pro-1.5)\n- [google/gemini-flash-1.5](/google/gemini-flash-1.5)\n- [mistralai/mistral-large-2407](/mistralai/mistral-large-2407)\n- [mistralai/mistral-nemo](/mistralai/mistral-nemo)\n- [deepseek/deepseek-r1](/deepseek/deepseek-r1)\n- [meta-llama/llama-3.1-70b-instruct](/meta-llama/llama-3.1-70b-instruct)\n- [meta-llama/llama-3.1-405b-instruct](/meta-llama/llama-3.1-405b-instruct)\n- [mistralai/mixtral-8x22b-instruct](/mistralai/mixtral-8x22b-instruct)\n- [cohere/command-r-plus](/cohere/command-r-plus)\n- [cohere/command-r](/cohere/command-r)",
+ "description": "Your prompt will be processed by a meta-model and routed to one of dozens of models (see below), optimizing for the best possible output.\n\nTo see which model was used, visit [Activity](/activity), or read the `model` attribute of the response. Your response will be priced at the same rate as the routed model.\n\nThe meta-model is powered by [Not Diamond](https://docs.notdiamond.ai/docs/how-not-diamond-works). Learn more in our [docs](/docs/model-routing).\n\nRequests will be routed to the following models:\n- [openai/gpt-5](/openai/gpt-5)\n- [openai/gpt-5-mini](/openai/gpt-5-mini)\n- [openai/gpt-5-nano](/openai/gpt-5-nano)\n- [openai/gpt-4.1-nano](/openai/gpt-4.1-nano)\n- [openai/gpt-4.1](/openai/gpt-4.1)\n- [openai/gpt-4.1-mini](/openai/gpt-4.1-mini)\n- [openai/gpt-4.1](/openai/gpt-4.1)\n- [openai/gpt-4o-mini](/openai/gpt-4o-mini)\n- [openai/chatgpt-4o-latest](/openai/chatgpt-4o-latest)\n- [anthropic/claude-3.5-haiku](/anthropic/claude-3.5-haiku)\n- [anthropic/claude-opus-4-1](/anthropic/claude-opus-4-1)\n- [anthropic/claude-sonnet-4-0](/anthropic/claude-sonnet-4-0)\n- [anthropic/claude-3-7-sonnet-latest](/anthropic/claude-3-7-sonnet-latest)\n- [google/gemini-2.5-pro](/google/gemini-2.5-pro)\n- [google/gemini-2.5-flash](/google/gemini-2.5-flash)\n- [mistral/mistral-large-latest](/mistral/mistral-large-latest)\n- [mistral/mistral-medium-latest](/mistral/mistral-medium-latest)\n- [mistral/mistral-small-latest](/mistral/mistral-small-latest)\n- [mistralai/mistral-nemo](/mistralai/mistral-nemo)\n- [x-ai/grok-3](/x-ai/grok-3)\n- [x-ai/grok-3-mini](/x-ai/grok-3-mini)\n- [x-ai/grok-4](/x-ai/grok-4)\n- [deepseek/deepseek-r1](/deepseek/deepseek-r1)\n- [meta-llama/llama-3.1-70b-instruct](/meta-llama/llama-3.1-70b-instruct)\n- [meta-llama/llama-3.1-405b-instruct](/meta-llama/llama-3.1-405b-instruct)\n- [mistralai/mixtral-8x22b-instruct](/mistralai/mixtral-8x22b-instruct)\n- [perplexity/sonar](/perplexity/sonar)\n- [cohere/command-r-plus](/cohere/command-r-plus)\n- [cohere/command-r](/cohere/command-r)",
"context_length": 2000000,
"architecture": {
"modality": "text->text",
@@ -15424,7 +16173,8 @@
"is_moderated": false
},
"per_request_limits": null,
- "supported_parameters": []
+ "supported_parameters": [],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4-1106-preview",
@@ -15474,55 +16224,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "openai/gpt-3.5-turbo-instruct",
- "canonical_slug": "openai/gpt-3.5-turbo-instruct",
- "hugging_face_id": null,
- "name": "OpenAI: GPT-3.5 Turbo Instruct",
- "created": 1695859200,
- "description": "This model is a variant of GPT-3.5 Turbo tuned for instructional prompts and omitting chat-related optimizations. Training data: up to Sep 2021.",
- "context_length": 4095,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "GPT",
- "instruct_type": "chatml"
- },
- "pricing": {
- "prompt": "0.0000015",
- "completion": "0.000002",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 4095,
- "max_completion_tokens": 4096,
- "is_moderated": true
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "top_logprobs",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mistralai/mistral-7b-instruct-v0.1",
@@ -15571,7 +16274,59 @@
"tools",
"top_k",
"top_p"
- ]
+ ],
+ "default_parameters": {
+ "temperature": 0.3
+ }
+ },
+ {
+ "id": "openai/gpt-3.5-turbo-instruct",
+ "canonical_slug": "openai/gpt-3.5-turbo-instruct",
+ "hugging_face_id": null,
+ "name": "OpenAI: GPT-3.5 Turbo Instruct",
+ "created": 1695859200,
+ "description": "This model is a variant of GPT-3.5 Turbo tuned for instructional prompts and omitting chat-related optimizations. Training data: up to Sep 2021.",
+ "context_length": 4095,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "GPT",
+ "instruct_type": "chatml"
+ },
+ "pricing": {
+ "prompt": "0.0000015",
+ "completion": "0.000002",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 4095,
+ "max_completion_tokens": 4096,
+ "is_moderated": true
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "presence_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-3.5-turbo-16k",
@@ -15621,7 +16376,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "mancer/weaver",
@@ -15659,17 +16415,22 @@
"supported_parameters": [
"frequency_penalty",
"logit_bias",
+ "logprobs",
"max_tokens",
"min_p",
"presence_penalty",
"repetition_penalty",
+ "response_format",
"seed",
"stop",
+ "structured_outputs",
"temperature",
"top_a",
"top_k",
+ "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "undi95/remm-slerp-l2-13b",
@@ -15707,6 +16468,7 @@
"supported_parameters": [
"frequency_penalty",
"logit_bias",
+ "logprobs",
"max_tokens",
"min_p",
"presence_penalty",
@@ -15718,8 +16480,10 @@
"temperature",
"top_a",
"top_k",
+ "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "gryphe/mythomax-l2-13b",
@@ -15741,8 +16505,8 @@
"instruct_type": "alpaca"
},
"pricing": {
- "prompt": "0.00000006",
- "completion": "0.00000006",
+ "prompt": "0.00000005",
+ "completion": "0.00000009",
"request": "0",
"image": "0",
"web_search": "0",
@@ -15750,13 +16514,14 @@
},
"top_provider": {
"context_length": 4096,
- "max_completion_tokens": null,
+ "max_completion_tokens": 4096,
"is_moderated": false
},
"per_request_limits": null,
"supported_parameters": [
"frequency_penalty",
"logit_bias",
+ "logprobs",
"max_tokens",
"min_p",
"presence_penalty",
@@ -15768,8 +16533,10 @@
"temperature",
"top_a",
"top_k",
+ "top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-3.5-turbo",
@@ -15819,57 +16586,8 @@
"tools",
"top_logprobs",
"top_p"
- ]
- },
- {
- "id": "openai/gpt-4",
- "canonical_slug": "openai/gpt-4",
- "hugging_face_id": null,
- "name": "OpenAI: GPT-4",
- "created": 1685232000,
- "description": "OpenAI's flagship model, GPT-4 is a large-scale multimodal language model capable of solving difficult problems with greater accuracy than previous models due to its broader general knowledge and advanced reasoning capabilities. Training data: up to Sep 2021.",
- "context_length": 8191,
- "architecture": {
- "modality": "text->text",
- "input_modalities": [
- "text"
- ],
- "output_modalities": [
- "text"
- ],
- "tokenizer": "GPT",
- "instruct_type": null
- },
- "pricing": {
- "prompt": "0.00003",
- "completion": "0.00006",
- "request": "0",
- "image": "0",
- "web_search": "0",
- "internal_reasoning": "0"
- },
- "top_provider": {
- "context_length": 8191,
- "max_completion_tokens": 4096,
- "is_moderated": true
- },
- "per_request_limits": null,
- "supported_parameters": [
- "frequency_penalty",
- "logit_bias",
- "logprobs",
- "max_tokens",
- "presence_penalty",
- "response_format",
- "seed",
- "stop",
- "structured_outputs",
- "temperature",
- "tool_choice",
- "tools",
- "top_logprobs",
- "top_p"
- ]
+ ],
+ "default_parameters": {}
},
{
"id": "openai/gpt-4-0314",
@@ -15919,7 +16637,59 @@
"tools",
"top_logprobs",
"top_p"
- ]
+ ],
+ "default_parameters": {}
+ },
+ {
+ "id": "openai/gpt-4",
+ "canonical_slug": "openai/gpt-4",
+ "hugging_face_id": null,
+ "name": "OpenAI: GPT-4",
+ "created": 1685232000,
+ "description": "OpenAI's flagship model, GPT-4 is a large-scale multimodal language model capable of solving difficult problems with greater accuracy than previous models due to its broader general knowledge and advanced reasoning capabilities. Training data: up to Sep 2021.",
+ "context_length": 8191,
+ "architecture": {
+ "modality": "text->text",
+ "input_modalities": [
+ "text"
+ ],
+ "output_modalities": [
+ "text"
+ ],
+ "tokenizer": "GPT",
+ "instruct_type": null
+ },
+ "pricing": {
+ "prompt": "0.00003",
+ "completion": "0.00006",
+ "request": "0",
+ "image": "0",
+ "web_search": "0",
+ "internal_reasoning": "0"
+ },
+ "top_provider": {
+ "context_length": 8191,
+ "max_completion_tokens": 4096,
+ "is_moderated": true
+ },
+ "per_request_limits": null,
+ "supported_parameters": [
+ "frequency_penalty",
+ "logit_bias",
+ "logprobs",
+ "max_tokens",
+ "presence_penalty",
+ "response_format",
+ "seed",
+ "stop",
+ "structured_outputs",
+ "temperature",
+ "tool_choice",
+ "tools",
+ "top_logprobs",
+ "top_p"
+ ],
+ "default_parameters": {}
}
]
}
\ No newline at end of file
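Note on the hunks above: each OpenRouter cache entry now carries a `default_parameters` object alongside `supported_parameters` — empty for most models, and `{ "temperature": 0.3 }` for the Mistral-family entries. The sketch below is only an illustration of how such a field could be merged under per-request overrides; `CachedModelEntry` and `resolveParameters` are hypothetical names for this example, not kbot's actual types or API.

```ts
// Hypothetical sketch — shapes and names are assumptions based on the cached JSON above.
interface CachedModelEntry {
  id: string;
  supported_parameters: string[];
  // New field in this cache refresh: per-model defaults,
  // e.g. {} for most entries or { temperature: 0.3 } for Mistral models.
  default_parameters?: Record<string, number | string | boolean>;
}

// Merge a model's cached defaults under explicit per-request overrides.
function resolveParameters(
  model: CachedModelEntry,
  overrides: Record<string, unknown> = {},
): Record<string, unknown> {
  return { ...(model.default_parameters ?? {}), ...overrides };
}

// Example: an entry carrying { temperature: 0.3 } yields that value
// unless the caller overrides it.
const mistralSmall: CachedModelEntry = {
  id: "mistralai/mistral-small",
  supported_parameters: ["temperature", "top_p"],
  default_parameters: { temperature: 0.3 },
};

console.log(resolveParameters(mistralSmall));                       // { temperature: 0.3 }
console.log(resolveParameters(mistralSmall, { temperature: 0.7 })); // { temperature: 0.7 }
```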
diff --git a/packages/kbot/dist-in/src/models/cache/openai.ts b/packages/kbot/dist-in/src/models/cache/openai.ts
index 251ea3da..5c8b82e1 100644
--- a/packages/kbot/dist-in/src/models/cache/openai.ts
+++ b/packages/kbot/dist-in/src/models/cache/openai.ts
@@ -1 +1 @@
-export const models = [{"id":"gpt-4-0613","object":"model","created":1686588896,"owned_by":"openai"},{"id":"gpt-4","object":"model","created":1687882411,"owned_by":"openai"},{"id":"gpt-3.5-turbo","object":"model","created":1677610602,"owned_by":"openai"},{"id":"gpt-audio","object":"model","created":1756339249,"owned_by":"system"},{"id":"gpt-5-nano","object":"model","created":1754426384,"owned_by":"system"},{"id":"gpt-audio-2025-08-28","object":"model","created":1756256146,"owned_by":"system"},{"id":"gpt-realtime","object":"model","created":1756271701,"owned_by":"system"},{"id":"gpt-realtime-2025-08-28","object":"model","created":1756271773,"owned_by":"system"},{"id":"davinci-002","object":"model","created":1692634301,"owned_by":"system"},{"id":"babbage-002","object":"model","created":1692634615,"owned_by":"system"},{"id":"gpt-3.5-turbo-instruct","object":"model","created":1692901427,"owned_by":"system"},{"id":"gpt-3.5-turbo-instruct-0914","object":"model","created":1694122472,"owned_by":"system"},{"id":"dall-e-3","object":"model","created":1698785189,"owned_by":"system"},{"id":"dall-e-2","object":"model","created":1698798177,"owned_by":"system"},{"id":"gpt-4-1106-preview","object":"model","created":1698957206,"owned_by":"system"},{"id":"gpt-3.5-turbo-1106","object":"model","created":1698959748,"owned_by":"system"},{"id":"tts-1-hd","object":"model","created":1699046015,"owned_by":"system"},{"id":"tts-1-1106","object":"model","created":1699053241,"owned_by":"system"},{"id":"tts-1-hd-1106","object":"model","created":1699053533,"owned_by":"system"},{"id":"text-embedding-3-small","object":"model","created":1705948997,"owned_by":"system"},{"id":"text-embedding-3-large","object":"model","created":1705953180,"owned_by":"system"},{"id":"gpt-4-0125-preview","object":"model","created":1706037612,"owned_by":"system"},{"id":"gpt-4-turbo-preview","object":"model","created":1706037777,"owned_by":"system"},{"id":"gpt-3.5-turbo-0125","object":"model","created":1706048358,"owned_by":"system"},{"id":"gpt-4-turbo","object":"model","created":1712361441,"owned_by":"system"},{"id":"gpt-4-turbo-2024-04-09","object":"model","created":1712601677,"owned_by":"system"},{"id":"gpt-4o","object":"model","created":1715367049,"owned_by":"system"},{"id":"gpt-4o-2024-05-13","object":"model","created":1715368132,"owned_by":"system"},{"id":"gpt-4o-mini-2024-07-18","object":"model","created":1721172717,"owned_by":"system"},{"id":"gpt-4o-mini","object":"model","created":1721172741,"owned_by":"system"},{"id":"gpt-4o-2024-08-06","object":"model","created":1722814719,"owned_by":"system"},{"id":"chatgpt-4o-latest","object":"model","created":1723515131,"owned_by":"system"},{"id":"o1-mini-2024-09-12","object":"model","created":1725648979,"owned_by":"system"},{"id":"o1-mini","object":"model","created":1725649008,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2024-10-01","object":"model","created":1727131766,"owned_by":"system"},{"id":"gpt-4o-audio-preview-2024-10-01","object":"model","created":1727389042,"owned_by":"system"},{"id":"gpt-4o-audio-preview","object":"model","created":1727460443,"owned_by":"system"},{"id":"gpt-4o-realtime-preview","object":"model","created":1727659998,"owned_by":"system"},{"id":"omni-moderation-latest","object":"model","created":1731689265,"owned_by":"system"},{"id":"omni-moderation-2024-09-26","object":"model","created":1732734466,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2024-12-17","object":"model","created":1733945430,"owned_by":"system"},{"id":"gpt-4o-audio-preview-2024-12-17","object":"
model","created":1734034239,"owned_by":"system"},{"id":"gpt-4o-mini-realtime-preview-2024-12-17","object":"model","created":1734112601,"owned_by":"system"},{"id":"gpt-4o-mini-audio-preview-2024-12-17","object":"model","created":1734115920,"owned_by":"system"},{"id":"o1-2024-12-17","object":"model","created":1734326976,"owned_by":"system"},{"id":"o1","object":"model","created":1734375816,"owned_by":"system"},{"id":"gpt-4o-mini-realtime-preview","object":"model","created":1734387380,"owned_by":"system"},{"id":"gpt-4o-mini-audio-preview","object":"model","created":1734387424,"owned_by":"system"},{"id":"o3-mini","object":"model","created":1737146383,"owned_by":"system"},{"id":"o3-mini-2025-01-31","object":"model","created":1738010200,"owned_by":"system"},{"id":"gpt-4o-2024-11-20","object":"model","created":1739331543,"owned_by":"system"},{"id":"gpt-4o-search-preview-2025-03-11","object":"model","created":1741388170,"owned_by":"system"},{"id":"gpt-4o-search-preview","object":"model","created":1741388720,"owned_by":"system"},{"id":"gpt-4o-mini-search-preview-2025-03-11","object":"model","created":1741390858,"owned_by":"system"},{"id":"gpt-4o-mini-search-preview","object":"model","created":1741391161,"owned_by":"system"},{"id":"gpt-4o-transcribe","object":"model","created":1742068463,"owned_by":"system"},{"id":"gpt-4o-mini-transcribe","object":"model","created":1742068596,"owned_by":"system"},{"id":"o1-pro-2025-03-19","object":"model","created":1742251504,"owned_by":"system"},{"id":"o1-pro","object":"model","created":1742251791,"owned_by":"system"},{"id":"gpt-4o-mini-tts","object":"model","created":1742403959,"owned_by":"system"},{"id":"o3-2025-04-16","object":"model","created":1744133301,"owned_by":"system"},{"id":"o4-mini-2025-04-16","object":"model","created":1744133506,"owned_by":"system"},{"id":"o3","object":"model","created":1744225308,"owned_by":"system"},{"id":"o4-mini","object":"model","created":1744225351,"owned_by":"system"},{"id":"gpt-4.1-2025-04-14","object":"model","created":1744315746,"owned_by":"system"},{"id":"gpt-4.1","object":"model","created":1744316542,"owned_by":"system"},{"id":"gpt-4.1-mini-2025-04-14","object":"model","created":1744317547,"owned_by":"system"},{"id":"gpt-4.1-mini","object":"model","created":1744318173,"owned_by":"system"},{"id":"gpt-4.1-nano-2025-04-14","object":"model","created":1744321025,"owned_by":"system"},{"id":"gpt-4.1-nano","object":"model","created":1744321707,"owned_by":"system"},{"id":"gpt-image-1","object":"model","created":1745517030,"owned_by":"system"},{"id":"codex-mini-latest","object":"model","created":1746673257,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2025-06-03","object":"model","created":1748907838,"owned_by":"system"},{"id":"gpt-4o-audio-preview-2025-06-03","object":"model","created":1748908498,"owned_by":"system"},{"id":"o4-mini-deep-research","object":"model","created":1749685485,"owned_by":"system"},{"id":"o4-mini-deep-research-2025-06-26","object":"model","created":1750866121,"owned_by":"system"},{"id":"gpt-5-chat-latest","object":"model","created":1754073306,"owned_by":"system"},{"id":"gpt-5-2025-08-07","object":"model","created":1754075360,"owned_by":"system"},{"id":"gpt-5","object":"model","created":1754425777,"owned_by":"system"},{"id":"gpt-5-mini-2025-08-07","object":"model","created":1754425867,"owned_by":"system"},{"id":"gpt-5-mini","object":"model","created":1754425928,"owned_by":"system"},{"id":"gpt-5-nano-2025-08-07","object":"model","created":1754426303,"owned_by":"system"},{"id":"gpt-3.5-turbo-16k","object":"m
odel","created":1683758102,"owned_by":"openai-internal"},{"id":"tts-1","object":"model","created":1681940951,"owned_by":"openai-internal"},{"id":"whisper-1","object":"model","created":1677532384,"owned_by":"openai-internal"},{"id":"text-embedding-ada-002","object":"model","created":1671217299,"owned_by":"openai-internal"}]
\ No newline at end of file
+export const models = [{"id":"gpt-4-0613","object":"model","created":1686588896,"owned_by":"openai"},{"id":"gpt-4","object":"model","created":1687882411,"owned_by":"openai"},{"id":"gpt-3.5-turbo","object":"model","created":1677610602,"owned_by":"openai"},{"id":"sora-2-pro","object":"model","created":1759708663,"owned_by":"system"},{"id":"gpt-audio-mini-2025-10-06","object":"model","created":1759512137,"owned_by":"system"},{"id":"gpt-realtime-mini","object":"model","created":1759517133,"owned_by":"system"},{"id":"gpt-realtime-mini-2025-10-06","object":"model","created":1759517175,"owned_by":"system"},{"id":"sora-2","object":"model","created":1759708615,"owned_by":"system"},{"id":"davinci-002","object":"model","created":1692634301,"owned_by":"system"},{"id":"babbage-002","object":"model","created":1692634615,"owned_by":"system"},{"id":"gpt-3.5-turbo-instruct","object":"model","created":1692901427,"owned_by":"system"},{"id":"gpt-3.5-turbo-instruct-0914","object":"model","created":1694122472,"owned_by":"system"},{"id":"dall-e-3","object":"model","created":1698785189,"owned_by":"system"},{"id":"dall-e-2","object":"model","created":1698798177,"owned_by":"system"},{"id":"gpt-4-1106-preview","object":"model","created":1698957206,"owned_by":"system"},{"id":"gpt-3.5-turbo-1106","object":"model","created":1698959748,"owned_by":"system"},{"id":"tts-1-hd","object":"model","created":1699046015,"owned_by":"system"},{"id":"tts-1-1106","object":"model","created":1699053241,"owned_by":"system"},{"id":"tts-1-hd-1106","object":"model","created":1699053533,"owned_by":"system"},{"id":"text-embedding-3-small","object":"model","created":1705948997,"owned_by":"system"},{"id":"text-embedding-3-large","object":"model","created":1705953180,"owned_by":"system"},{"id":"gpt-4-0125-preview","object":"model","created":1706037612,"owned_by":"system"},{"id":"gpt-4-turbo-preview","object":"model","created":1706037777,"owned_by":"system"},{"id":"gpt-3.5-turbo-0125","object":"model","created":1706048358,"owned_by":"system"},{"id":"gpt-4-turbo","object":"model","created":1712361441,"owned_by":"system"},{"id":"gpt-4-turbo-2024-04-09","object":"model","created":1712601677,"owned_by":"system"},{"id":"gpt-4o","object":"model","created":1715367049,"owned_by":"system"},{"id":"gpt-4o-2024-05-13","object":"model","created":1715368132,"owned_by":"system"},{"id":"gpt-4o-mini-2024-07-18","object":"model","created":1721172717,"owned_by":"system"},{"id":"gpt-4o-mini","object":"model","created":1721172741,"owned_by":"system"},{"id":"gpt-4o-2024-08-06","object":"model","created":1722814719,"owned_by":"system"},{"id":"chatgpt-4o-latest","object":"model","created":1723515131,"owned_by":"system"},{"id":"o1-mini-2024-09-12","object":"model","created":1725648979,"owned_by":"system"},{"id":"o1-mini","object":"model","created":1725649008,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2024-10-01","object":"model","created":1727131766,"owned_by":"system"},{"id":"gpt-4o-audio-preview-2024-10-01","object":"model","created":1727389042,"owned_by":"system"},{"id":"gpt-4o-audio-preview","object":"model","created":1727460443,"owned_by":"system"},{"id":"gpt-4o-realtime-preview","object":"model","created":1727659998,"owned_by":"system"},{"id":"omni-moderation-latest","object":"model","created":1731689265,"owned_by":"system"},{"id":"omni-moderation-2024-09-26","object":"model","created":1732734466,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2024-12-17","object":"model","created":1733945430,"owned_by":"system"},{"id":"gpt-4o-audio-preview-2024-12-17
","object":"model","created":1734034239,"owned_by":"system"},{"id":"gpt-4o-mini-realtime-preview-2024-12-17","object":"model","created":1734112601,"owned_by":"system"},{"id":"gpt-4o-mini-audio-preview-2024-12-17","object":"model","created":1734115920,"owned_by":"system"},{"id":"o1-2024-12-17","object":"model","created":1734326976,"owned_by":"system"},{"id":"o1","object":"model","created":1734375816,"owned_by":"system"},{"id":"gpt-4o-mini-realtime-preview","object":"model","created":1734387380,"owned_by":"system"},{"id":"gpt-4o-mini-audio-preview","object":"model","created":1734387424,"owned_by":"system"},{"id":"o3-mini","object":"model","created":1737146383,"owned_by":"system"},{"id":"o3-mini-2025-01-31","object":"model","created":1738010200,"owned_by":"system"},{"id":"gpt-4o-2024-11-20","object":"model","created":1739331543,"owned_by":"system"},{"id":"gpt-4o-search-preview-2025-03-11","object":"model","created":1741388170,"owned_by":"system"},{"id":"gpt-4o-search-preview","object":"model","created":1741388720,"owned_by":"system"},{"id":"gpt-4o-mini-search-preview-2025-03-11","object":"model","created":1741390858,"owned_by":"system"},{"id":"gpt-4o-mini-search-preview","object":"model","created":1741391161,"owned_by":"system"},{"id":"gpt-4o-transcribe","object":"model","created":1742068463,"owned_by":"system"},{"id":"gpt-4o-mini-transcribe","object":"model","created":1742068596,"owned_by":"system"},{"id":"o1-pro-2025-03-19","object":"model","created":1742251504,"owned_by":"system"},{"id":"o1-pro","object":"model","created":1742251791,"owned_by":"system"},{"id":"gpt-4o-mini-tts","object":"model","created":1742403959,"owned_by":"system"},{"id":"o3-2025-04-16","object":"model","created":1744133301,"owned_by":"system"},{"id":"o4-mini-2025-04-16","object":"model","created":1744133506,"owned_by":"system"},{"id":"o3","object":"model","created":1744225308,"owned_by":"system"},{"id":"o4-mini","object":"model","created":1744225351,"owned_by":"system"},{"id":"gpt-4.1-2025-04-14","object":"model","created":1744315746,"owned_by":"system"},{"id":"gpt-4.1","object":"model","created":1744316542,"owned_by":"system"},{"id":"gpt-4.1-mini-2025-04-14","object":"model","created":1744317547,"owned_by":"system"},{"id":"gpt-4.1-mini","object":"model","created":1744318173,"owned_by":"system"},{"id":"gpt-4.1-nano-2025-04-14","object":"model","created":1744321025,"owned_by":"system"},{"id":"gpt-4.1-nano","object":"model","created":1744321707,"owned_by":"system"},{"id":"gpt-image-1","object":"model","created":1745517030,"owned_by":"system"},{"id":"codex-mini-latest","object":"model","created":1746673257,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2025-06-03","object":"model","created":1748907838,"owned_by":"system"},{"id":"gpt-4o-audio-preview-2025-06-03","object":"model","created":1748908498,"owned_by":"system"},{"id":"o4-mini-deep-research","object":"model","created":1749685485,"owned_by":"system"},{"id":"o4-mini-deep-research-2025-06-26","object":"model","created":1750866121,"owned_by":"system"},{"id":"gpt-5-chat-latest","object":"model","created":1754073306,"owned_by":"system"},{"id":"gpt-5-2025-08-07","object":"model","created":1754075360,"owned_by":"system"},{"id":"gpt-5","object":"model","created":1754425777,"owned_by":"system"},{"id":"gpt-5-mini-2025-08-07","object":"model","created":1754425867,"owned_by":"system"},{"id":"gpt-5-mini","object":"model","created":1754425928,"owned_by":"system"},{"id":"gpt-5-nano-2025-08-07","object":"model","created":1754426303,"owned_by":"system"},{"id":"gpt-5-nano","objec
t":"model","created":1754426384,"owned_by":"system"},{"id":"gpt-audio-2025-08-28","object":"model","created":1756256146,"owned_by":"system"},{"id":"gpt-realtime","object":"model","created":1756271701,"owned_by":"system"},{"id":"gpt-realtime-2025-08-28","object":"model","created":1756271773,"owned_by":"system"},{"id":"gpt-audio","object":"model","created":1756339249,"owned_by":"system"},{"id":"gpt-5-codex","object":"model","created":1757527818,"owned_by":"system"},{"id":"gpt-image-1-mini","object":"model","created":1758845821,"owned_by":"system"},{"id":"gpt-5-pro-2025-10-06","object":"model","created":1759469707,"owned_by":"system"},{"id":"gpt-5-pro","object":"model","created":1759469822,"owned_by":"system"},{"id":"gpt-audio-mini","object":"model","created":1759512027,"owned_by":"system"},{"id":"gpt-3.5-turbo-16k","object":"model","created":1683758102,"owned_by":"openai-internal"},{"id":"tts-1","object":"model","created":1681940951,"owned_by":"openai-internal"},{"id":"whisper-1","object":"model","created":1677532384,"owned_by":"openai-internal"},{"id":"text-embedding-ada-002","object":"model","created":1671217299,"owned_by":"openai-internal"}]
\ No newline at end of file
diff --git a/packages/kbot/dist-in/src/models/cache/openrouter.ts b/packages/kbot/dist-in/src/models/cache/openrouter.ts
index 1b0f7976..45fd73ba 100644
--- a/packages/kbot/dist-in/src/models/cache/openrouter.ts
+++ b/packages/kbot/dist-in/src/models/cache/openrouter.ts
@@ -1 +1 @@
-export const models = [{"id":"x-ai/grok-4-fast:free","name":"xAI: Grok 4 Fast (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758240090,"top_provider":{"context_length":2000000,"max_completion_tokens":30000,"is_moderated":false}},{"id":"alibaba/tongyi-deepresearch-30b-a3b","name":"Tongyi DeepResearch 30B A3B","pricing":{"prompt":"0.00000009","completion":"0.00000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758210804,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen3-coder-flash","name":"Qwen: Qwen3 Coder Flash","pricing":{"prompt":"0.0000003","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008"},"created":1758115536,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":false}},{"id":"qwen/qwen3-coder-plus","name":"Qwen: Qwen3 Coder Plus","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000001"},"created":1758115194,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":false}},{"id":"arcee-ai/afm-4.5b","name":"Arcee AI: AFM 4.5B","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758040484,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"opengvlab/internvl3-78b","name":"OpenGVLab: InternVL3 78B","pricing":{"prompt":"0.00000003","completion":"0.00000013","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757962555,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-thinking","name":"Qwen: Qwen3 Next 80B A3B Thinking","pricing":{"prompt":"0.0000001","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757612284,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-instruct","name":"Qwen: Qwen3 Next 80B A3B Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757612213,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"meituan/longcat-flash-chat","name":"Meituan: LongCat Flash Chat","pricing":{"prompt":"0.00000012","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757427658,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-plus-2025-07-28","name":"Qwen: Qwen Plus 0728","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757347599,"top_provider":{"context_length":1000000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-plus-2025-07-28:thinking","name":"Qwen: Qwen Plus 0728 (thinking)","pricing":{"prompt":"0.0000004","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757347599,"top_provider":{"context_length":1000000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"nvidia/nemotron-nano-9b-v2:free","name":"NVIDIA: Nemotron 
Nano 9B V2 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757106807,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/nemotron-nano-9b-v2","name":"NVIDIA: Nemotron Nano 9B V2","pricing":{"prompt":"0.00000004","completion":"0.00000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757106807,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-max","name":"Qwen: Qwen3 Max","pricing":{"prompt":"0.0000012","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000024"},"created":1757076567,"top_provider":{"context_length":256000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"moonshotai/kimi-k2-0905","name":"MoonshotAI: Kimi K2 0905","pricing":{"prompt":"0.00000038","completion":"0.00000152","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757021147,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"bytedance/seed-oss-36b-instruct","name":"ByteDance: Seed OSS 36B Instruct","pricing":{"prompt":"0.00000016","completion":"0.00000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756834704,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-llama-109b-moe","name":"Cogito V2 Preview Llama 109B","pricing":{"prompt":"0.00000018","completion":"0.00000059","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756831568,"top_provider":{"context_length":32767,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-deepseek-671b","name":"Deep Cogito: Cogito V2 Preview Deepseek 671B","pricing":{"prompt":"0.00000125","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756830949,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"stepfun-ai/step3","name":"StepFun: Step3","pricing":{"prompt":"0.00000057","completion":"0.00000142","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756415375,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-thinking-2507","name":"Qwen: Qwen3 30B A3B Thinking 2507","pricing":{"prompt":"0.00000008","completion":"0.00000029","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756399192,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"x-ai/grok-code-fast-1","name":"xAI: Grok Code Fast 1","pricing":{"prompt":"0.0000002","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1756238927,"top_provider":{"context_length":256000,"max_completion_tokens":10000,"is_moderated":false}},{"id":"nousresearch/hermes-4-70b","name":"Nous: Hermes 4 70B","pricing":{"prompt":"0.00000011","completion":"0.00000038","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756236182,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-4-405b","name":"Nous: Hermes 4 
405B","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756235463,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-flash-image-preview","name":"Google: Gemini 2.5 Flash Image Preview","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0"},"created":1756218977,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3.1:free","name":"DeepSeek: DeepSeek V3.1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755779628,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3.1","name":"DeepSeek: DeepSeek V3.1","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755779628,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-v3.1-base","name":"DeepSeek: DeepSeek V3.1 Base","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755727017,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-audio-preview","name":"OpenAI: GPT-4o Audio","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","audio":"0.00004","web_search":"0","internal_reasoning":"0"},"created":1755233061,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-medium-3.1","name":"Mistral: Mistral Medium 3.1","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755095639,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"baidu/ernie-4.5-21b-a3b","name":"Baidu: ERNIE 4.5 21B A3B","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755034167,"top_provider":{"context_length":120000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-28b-a3b","name":"Baidu: ERNIE 4.5 VL 28B A3B","pricing":{"prompt":"0.00000014","completion":"0.00000056","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755032836,"top_provider":{"context_length":30000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"z-ai/glm-4.5v","name":"Z.AI: GLM 4.5V","pricing":{"prompt":"0.0000005","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754922288,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"ai21/jamba-mini-1.7","name":"AI21: Jamba Mini 1.7","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754670601,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-large-1.7","name":"AI21: Jamba Large 
1.7","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754669020,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-5-chat","name":"OpenAI: GPT-5 Chat","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000125"},"created":1754587837,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-5","name":"OpenAI: GPT-5","pricing":{"prompt":"0.000000625","completion":"0.000005","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0","input_cache_read":"0.0000000625"},"created":1754587413,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-mini","name":"OpenAI: GPT-5 Mini","pricing":{"prompt":"0.00000025","completion":"0.000002","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000025"},"created":1754587407,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-nano","name":"OpenAI: GPT-5 Nano","pricing":{"prompt":"0.00000005","completion":"0.0000004","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000005"},"created":1754587402,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-oss-120b:free","name":"OpenAI: gpt-oss-120b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414231,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":true}},{"id":"openai/gpt-oss-120b","name":"OpenAI: gpt-oss-120b","pricing":{"prompt":"0.00000005","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414231,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-oss-20b:free","name":"OpenAI: gpt-oss-20b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414229,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/gpt-oss-20b","name":"OpenAI: gpt-oss-20b","pricing":{"prompt":"0.00000003","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414229,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"anthropic/claude-opus-4.1","name":"Anthropic: Claude Opus 4.1","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1754411591,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"mistralai/codestral-2508","name":"Mistral: Codestral 2508","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754079630,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder-30b-a3b-instruct","name":"Qwen: Qwen3 Coder 30B A3B 
Instruct","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753972379,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-instruct-2507","name":"Qwen: Qwen3 30B A3B Instruct 2507","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753806965,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4.5","name":"Z.AI: GLM 4.5","pricing":{"prompt":"0.00000041","completion":"0.00000165","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471347,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4.5-air:free","name":"Z.AI: GLM 4.5 Air (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4.5-air","name":"Z.AI: GLM 4.5 Air","pricing":{"prompt":"0.00000014","completion":"0.00000086","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-thinking-2507","name":"Qwen: Qwen3 235B A22B Thinking 2507","pricing":{"prompt":"0.0000001","completion":"0.00000039","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753449557,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4-32b","name":"Z.AI: GLM 4 32B ","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753376617,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder:free","name":"Qwen: Qwen3 Coder 480B A35B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder","name":"Qwen: Qwen3 Coder 480B A35B","pricing":{"prompt":"0.00000022","completion":"0.00000095","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"bytedance/ui-tars-1.5-7b","name":"ByteDance: UI-TARS 7B ","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753205056,"top_provider":{"context_length":128000,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite","name":"Google: Gemini 2.5 Flash Lite","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1753200276,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-2507","name":"Qwen: Qwen3 235B A22B Instruct 
2507","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753119555,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"switchpoint/router","name":"Switchpoint Router","pricing":{"prompt":"0.00000085","completion":"0.0000034","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752272899,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2:free","name":"MoonshotAI: Kimi K2 0711 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752263252,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2","name":"MoonshotAI: Kimi K2 0711","pricing":{"prompt":"0.00000014","completion":"0.00000249","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752263252,"top_provider":{"context_length":63000,"max_completion_tokens":63000,"is_moderated":false}},{"id":"thudm/glm-4.1v-9b-thinking","name":"THUDM: GLM 4.1V 9B Thinking","pricing":{"prompt":"0.000000035","completion":"0.000000138","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752244385,"top_provider":{"context_length":65536,"max_completion_tokens":8000,"is_moderated":false}},{"id":"mistralai/devstral-medium","name":"Mistral: Devstral Medium","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752161321,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small","name":"Mistral: Devstral Small 1.1","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752160751,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mistral-24b-venice-edition:free","name":"Venice: Uncensored (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752094966,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-4","name":"xAI: Grok 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1752087689,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e2b-it:free","name":"Google: Gemma 3n 2B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752074904,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"tencent/hunyuan-a13b-instruct:free","name":"Tencent: Hunyuan A13B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751987664,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"tencent/hunyuan-a13b-instruct","name":"Tencent: Hunyuan A13B 
Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751987664,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t2-chimera:free","name":"TNG: DeepSeek R1T2 Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751986985,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"morph/morph-v3-large","name":"Morph: Morph V3 Large","pricing":{"prompt":"0.0000009","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751910858,"top_provider":{"context_length":81920,"max_completion_tokens":38000,"is_moderated":false}},{"id":"morph/morph-v3-fast","name":"Morph: Morph V3 Fast","pricing":{"prompt":"0.0000009","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751910002,"top_provider":{"context_length":81920,"max_completion_tokens":38000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-424b-a47b","name":"Baidu: ERNIE 4.5 VL 424B A47B ","pricing":{"prompt":"0.00000042","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300903,"top_provider":{"context_length":123000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"baidu/ernie-4.5-300b-a47b","name":"Baidu: ERNIE 4.5 300B A47B ","pricing":{"prompt":"0.00000028","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300139,"top_provider":{"context_length":123000,"max_completion_tokens":12000,"is_moderated":false}},{"id":"thedrummer/anubis-70b-v1.1","name":"TheDrummer: Anubis 70B V1.1","pricing":{"prompt":"0.0000004","completion":"0.0000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751208347,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury","name":"Inception: Mercury","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750973026,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct:free","name":"Mistral: Mistral Small 3.2 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct","name":"Mistral: Mistral Small 3.2 24B","pricing":{"prompt":"0.000000075","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"minimax/minimax-m1","name":"MiniMax: MiniMax M1","pricing":{"prompt":"0.0000003","completion":"0.00000165","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750200414,"top_provider":{"context_length":1000000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite-preview-06-17","name":"Google: Gemini 2.5 Flash Lite Preview 
06-17","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1750173831,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash","name":"Google: Gemini 2.5 Flash","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075","input_cache_write":"0.0000003833"},"created":1750172488,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-pro","name":"Google: Gemini 2.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1750169544,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b:free","name":"MoonshotAI: Kimi Dev 72B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b","name":"MoonshotAI: Kimi Dev 72B","pricing":{"prompt":"0.00000029","completion":"0.00000115","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/o3-pro","name":"OpenAI: o3 Pro","pricing":{"prompt":"0.00002","completion":"0.00008","request":"0","image":"0.0153","web_search":"0.01","internal_reasoning":"0"},"created":1749598352,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"x-ai/grok-3-mini","name":"xAI: Grok 3 Mini","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1749583245,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3","name":"xAI: Grok 3","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1749582908,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/magistral-small-2506","name":"Mistral: Magistral Small 2506","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749569561,"top_provider":{"context_length":40000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506","name":"Mistral: Magistral Medium 2506","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506:thinking","name":"Mistral: Magistral Medium 2506 
(thinking)","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview","name":"Google: Gemini 2.5 Pro Preview 06-05","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1749137257,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b:free","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B","pricing":{"prompt":"0.00000001","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528:free","name":"DeepSeek: R1 0528 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528","name":"DeepSeek: R1 0528","pricing":{"prompt":"0.0000004","completion":"0.00000175","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-opus-4","name":"Anthropic: Claude Opus 4","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1747931245,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"anthropic/claude-sonnet-4","name":"Anthropic: Claude Sonnet 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1747930371,"top_provider":{"context_length":1000000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"mistralai/devstral-small-2505:free","name":"Mistral: Devstral Small 2505 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small-2505","name":"Mistral: Devstral Small 2505","pricing":{"prompt":"0.00000004","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it:free","name":"Google: Gemma 3n 4B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it","name":"Google: Gemma 3n 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/codex-mini","name":"OpenAI: Codex Mini","pricing":{"prompt":"0.0000015","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000375"},"created":1747409761,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"meta-llama/llama-3.3-8b-instruct:free","name":"Meta: Llama 3.3 8B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747230154,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"nousresearch/deephermes-3-mistral-24b-preview","name":"Nous: DeepHermes 3 Mistral 24B Preview","pricing":{"prompt":"0.00000013","completion":"0.00000051","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746830904,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-medium-3","name":"Mistral: Mistral Medium 3","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746627341,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview-05-06","name":"Google: Gemini 2.5 Pro Preview 05-06","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1746578513,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"arcee-ai/spotlight","name":"Arcee AI: Spotlight","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481552,"top_provider":{"context_length":131072,"max_completion_tokens":65537,"is_moderated":false}},{"id":"arcee-ai/maestro-reasoning","name":"Arcee AI: Maestro Reasoning","pricing":{"prompt":"0.0000009","completion":"0.0000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481269,"top_provider":{"context_length":131072,"max_completion_tokens":32000,"is_moderated":false}},{"id":"arcee-ai/virtuoso-large","name":"Arcee AI: Virtuoso Large","pricing":{"prompt":"0.00000075","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478885,"top_provider":{"context_length":131072,"max_completion_tokens":64000,"is_moderated":false}},{"id":"arcee-ai/coder-large","name":"Arcee AI: Coder Large","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478663,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4-reasoning-plus","name":"Microsoft: Phi 4 Reasoning 
Plus","pricing":{"prompt":"0.00000007","completion":"0.00000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746130961,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury-coder","name":"Inception: Mercury Coder","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746033880,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen3-4b:free","name":"Qwen: Qwen3 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746031104,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-prover-v2","name":"DeepSeek: DeepSeek Prover V2","pricing":{"prompt":"0.0000005","completion":"0.00000218","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746013094,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-4-12b","name":"Meta: Llama Guard 4 12B","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745975193,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b:free","name":"Qwen: Qwen3 30B A3B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b","name":"Qwen: Qwen3 30B A3B","pricing":{"prompt":"0.00000006","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-8b:free","name":"Qwen: Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b","name":"Qwen: Qwen3 8B","pricing":{"prompt":"0.000000035","completion":"0.000000138","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":128000,"max_completion_tokens":20000,"is_moderated":false}},{"id":"qwen/qwen3-14b:free","name":"Qwen: Qwen3 14B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-14b","name":"Qwen: Qwen3 14B","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-32b","name":"Qwen: Qwen3 32B","pricing":{"prompt":"0.00000003","completion":"0.00000013","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875945,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b:free","name":"Qwen: Qwen3 235B A22B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b","name":"Qwen: Qwen3 235B A22B","pricing":{"prompt":"0.00000018","completion":"0.00000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera:free","name":"TNG: DeepSeek R1T Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera","name":"TNG: DeepSeek R1T Chimera","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/mai-ds-r1:free","name":"Microsoft: MAI DS R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745194100,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/mai-ds-r1","name":"Microsoft: MAI DS R1","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745194100,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-z1-32b","name":"THUDM: GLM Z1 32B","pricing":{"prompt":"0.00000004","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744924148,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o4-mini-high","name":"OpenAI: o4 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744824212,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o3","name":"OpenAI: o3","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0.00153","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744823457,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o4-mini","name":"OpenAI: o4 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744820942,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"shisa-ai/shisa-v2-llama3.3-70b:free","name":"Shisa AI: Shisa V2 Llama 3.3 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744754858,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"shisa-ai/shisa-v2-llama3.3-70b","name":"Shisa AI: Shisa V2 Llama 3.3 70B 
","pricing":{"prompt":"0.00000004","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744754858,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.1","name":"OpenAI: GPT-4.1","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744651385,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-mini","name":"OpenAI: GPT-4.1 Mini","pricing":{"prompt":"0.0000004","completion":"0.0000016","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.0000001"},"created":1744651381,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-nano","name":"OpenAI: GPT-4.1 Nano","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000025"},"created":1744651369,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"eleutherai/llemma_7b","name":"EleutherAI: Llemma 7b","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744643225,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"alfredpros/codellama-7b-instruct-solidity","name":"AlfredPros: CodeLLaMa 7B Instruct Solidity","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744641874,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"arliai/qwq-32b-arliai-rpr-v1:free","name":"ArliAI: QwQ 32B RpR v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555982,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"arliai/qwq-32b-arliai-rpr-v1","name":"ArliAI: QwQ 32B RpR v1","pricing":{"prompt":"0.00000002","completion":"0.00000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555982,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"agentica-org/deepcoder-14b-preview:free","name":"Agentica: Deepcoder 14B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555395,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"agentica-org/deepcoder-14b-preview","name":"Agentica: Deepcoder 14B Preview","pricing":{"prompt":"0.000000015","completion":"0.000000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555395,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-vl-a3b-thinking:free","name":"MoonshotAI: Kimi VL A3B Thinking (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744304841,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-vl-a3b-thinking","name":"MoonshotAI: Kimi VL A3B 
Thinking","pricing":{"prompt":"0.00000002","completion":"0.00000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744304841,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-mini-beta","name":"xAI: Grok 3 Mini Beta","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1744240195,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-beta","name":"xAI: Grok 3 Beta","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1744240068,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1","pricing":{"prompt":"0.0000006","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-maverick:free","name":"Meta: Llama 4 Maverick (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"meta-llama/llama-4-maverick","name":"Meta: Llama 4 Maverick","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006684","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":1048576,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-4-scout:free","name":"Meta: Llama 4 Scout (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"meta-llama/llama-4-scout","name":"Meta: Llama 4 Scout","pricing":{"prompt":"0.00000008","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":1048576,"max_completion_tokens":1048576,"is_moderated":false}},{"id":"allenai/molmo-7b-d","name":"AllenAI: Molmo 7B D","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743023247,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct:free","name":"Qwen: Qwen2.5 VL 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct","name":"Qwen: Qwen2.5 VL 32B Instruct","pricing":{"prompt":"0.00000004","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324:free","name":"DeepSeek: DeepSeek V3 0324 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324","name":"DeepSeek: DeepSeek V3 0324","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o1-pro","name":"OpenAI: o1-pro","pricing":{"prompt":"0.00015","completion":"0.0006","request":"0","image":"0.21675","web_search":"0","internal_reasoning":"0"},"created":1742423211,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-3.1-24b-instruct:free","name":"Mistral: Mistral Small 3.1 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.1-24b-instruct","name":"Mistral: Mistral Small 3.1 24B","pricing":{"prompt":"0.00000004","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":131072,"max_completion_tokens":96000,"is_moderated":false}},{"id":"allenai/olmo-2-0325-32b-instruct","name":"AllenAI: Olmo 2 32B Instruct","pricing":{"prompt":"0.000001","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741988556,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemma-3-4b-it:free","name":"Google: Gemma 3 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-4b-it","name":"Google: Gemma 3 4B","pricing":{"prompt":"0.00000004","completion":"0.00000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-12b-it:free","name":"Google: Gemma 3 12B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-12b-it","name":"Google: Gemma 3 12B","pricing":{"prompt":"0.00000004","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"cohere/command-a","name":"Cohere: Command A","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741894342,"top_provider":{"context_length":256000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"openai/gpt-4o-mini-search-preview","name":"OpenAI: GPT-4o-mini Search 
Preview","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0.0275","image":"0.000217","web_search":"0","internal_reasoning":"0"},"created":1741818122,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-search-preview","name":"OpenAI: GPT-4o Search Preview","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0.035","image":"0.003613","web_search":"0","internal_reasoning":"0"},"created":1741817949,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-3-27b-it:free","name":"Google: Gemma 3 27B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-27b-it","name":"Google: Gemma 3 27B","pricing":{"prompt":"0.00000007","completion":"0.00000026","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"thedrummer/anubis-pro-105b-v1","name":"TheDrummer: Anubis Pro 105B V1","pricing":{"prompt":"0.0000005","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741642290,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"thedrummer/skyfall-36b-v2","name":"TheDrummer: Skyfall 36B V2","pricing":{"prompt":"0.00000004","completion":"0.00000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741636566,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4-multimodal-instruct","name":"Microsoft: Phi 4 Multimodal Instruct","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0.00017685","web_search":"0","internal_reasoning":"0"},"created":1741396284,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-reasoning-pro","name":"Perplexity: Sonar Reasoning Pro","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741313308,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-pro","name":"Perplexity: Sonar Pro","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741312423,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"perplexity/sonar-deep-research","name":"Perplexity: Sonar Deep Research","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0.000003"},"created":1741311246,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b:free","name":"Qwen: QwQ 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b","name":"Qwen: QwQ 
32B","pricing":{"prompt":"0.00000015","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/deephermes-3-llama-3-8b-preview:free","name":"Nous: DeepHermes 3 Llama 3 8B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740719372,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.0-flash-lite-001","name":"Google: Gemini 2.0 Flash Lite","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740506212,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet","name":"Anthropic: Claude 3.7 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:thinking","name":"Anthropic: Claude 3.7 Sonnet (thinking)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"perplexity/r1-1776","name":"Perplexity: R1 1776","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740004929,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-saba","name":"Mistral: Saba","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739803239,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-r1-mistral-24b:free","name":"Dolphin3.0 R1 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462498,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-r1-mistral-24b","name":"Dolphin3.0 R1 Mistral 24B","pricing":{"prompt":"0.00000001","completion":"0.00000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462498,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b:free","name":"Dolphin3.0 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b","name":"Dolphin3.0 Mistral 
24B","pricing":{"prompt":"0.00000003","completion":"0.00000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-3-8b","name":"Llama Guard 3 8B","pricing":{"prompt":"0.00000002","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739401318,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-mini-high","name":"OpenAI: o3 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1739372611,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"deepseek/deepseek-r1-distill-llama-8b","name":"DeepSeek: R1 Distill Llama 8B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738937718,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"google/gemini-2.0-flash-001","name":"Google: Gemini 2.0 Flash","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0.0000258","audio":"0.0000007","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1738769413,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-vl-plus","name":"Qwen: Qwen VL Plus","pricing":{"prompt":"0.00000021","completion":"0.00000063","request":"0","image":"0.0002688","web_search":"0","internal_reasoning":"0"},"created":1738731255,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"aion-labs/aion-1.0","name":"AionLabs: Aion-1.0","pricing":{"prompt":"0.000004","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697557,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-1.0-mini","name":"AionLabs: Aion-1.0-Mini","pricing":{"prompt":"0.0000007","completion":"0.0000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697107,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-rp-llama-3.1-8b","name":"AionLabs: Aion-RP 1.0 (8B)","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738696718,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-vl-max","name":"Qwen: Qwen VL Max","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.001024","web_search":"0","internal_reasoning":"0"},"created":1738434304,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"qwen/qwen-turbo","name":"Qwen: Qwen-Turbo","pricing":{"prompt":"0.00000005","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1738410974,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct:free","name":"Qwen: Qwen2.5 VL 72B Instruct 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct","name":"Qwen: Qwen2.5 VL 72B Instruct","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-plus","name":"Qwen: Qwen-Plus","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000016"},"created":1738409840,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-max","name":"Qwen: Qwen-Max ","pricing":{"prompt":"0.0000016","completion":"0.0000064","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000064"},"created":1738402289,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/o3-mini","name":"OpenAI: o3 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1738351721,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-24b-instruct-2501:free","name":"Mistral: Mistral Small 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-24b-instruct-2501","name":"Mistral: Mistral Small 3","pricing":{"prompt":"0.00000004","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-32b","name":"DeepSeek: R1 Distill Qwen 32B","pricing":{"prompt":"0.00000027","completion":"0.00000027","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738194830,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b","name":"DeepSeek: R1 Distill Qwen 14B","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738193940,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"perplexity/sonar-reasoning","name":"Perplexity: Sonar Reasoning","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738131107,"top_provider":{"context_length":127000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar","name":"Perplexity: Sonar","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738013808,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-7b","name":"Liquid: LFM 
7B","pricing":{"prompt":"0.00000001","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806883,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-3b","name":"Liquid: LFM 3B","pricing":{"prompt":"0.00000002","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806501,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b:free","name":"DeepSeek: R1 Distill Llama 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b","name":"DeepSeek: R1 Distill Llama 70B","pricing":{"prompt":"0.00000003","completion":"0.00000013","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1:free","name":"DeepSeek: R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1","name":"DeepSeek: R1","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"minimax/minimax-01","name":"MiniMax: MiniMax-01","pricing":{"prompt":"0.0000002","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736915462,"top_provider":{"context_length":1000192,"max_completion_tokens":1000192,"is_moderated":false}},{"id":"mistralai/codestral-2501","name":"Mistral: Codestral 2501","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736895522,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4","name":"Microsoft: Phi 4","pricing":{"prompt":"0.00000006","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736489872,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat","name":"DeepSeek: DeepSeek V3","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3.3-euryale-70b","name":"Sao10K: Llama 3.3 Euryale 70B","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734535928,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/o1","name":"OpenAI: 
o1","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0.021675","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000075"},"created":1734459999,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"cohere/command-r7b-12-2024","name":"Cohere: Command R7B (12-2024)","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734158152,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"google/gemini-2.0-flash-exp:free","name":"Google: Gemini 2.0 Flash Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733937523,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct:free","name":"Meta: Llama 3.3 70B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct","name":"Meta: Llama 3.3 70B Instruct","pricing":{"prompt":"0.000000012","completion":"0.000000036","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"amazon/nova-lite-v1","name":"Amazon: Nova Lite 1.0","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0.00009","web_search":"0","internal_reasoning":"0"},"created":1733437363,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-micro-v1","name":"Amazon: Nova Micro 1.0","pricing":{"prompt":"0.000000035","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733437237,"top_provider":{"context_length":128000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-pro-v1","name":"Amazon: Nova Pro 1.0","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.0012","web_search":"0","internal_reasoning":"0"},"created":1733436303,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"qwen/qwq-32b-preview","name":"Qwen: QwQ 32B Preview","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1732754541,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-11-20","name":"OpenAI: GPT-4o (2024-11-20)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1732127594,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-large-2411","name":"Mistral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978685,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2407","name":"Mistral Large 
2407","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978415,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/pixtral-large-2411","name":"Mistral: Pixtral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0.002888","web_search":"0","internal_reasoning":"0"},"created":1731977388,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct:free","name":"Qwen2.5 Coder 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct","name":"Qwen2.5 Coder 32B Instruct","pricing":{"prompt":"0.00000006","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"raifle/sorcererlm-8x22b","name":"SorcererLM 8x22B","pricing":{"prompt":"0.0000045","completion":"0.0000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731105083,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/unslopnemo-12b","name":"TheDrummer: UnslopNemo 12B","pricing":{"prompt":"0.0000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731103448,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku","name":"Anthropic: Claude 3.5 Haiku","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthropic/claude-3.5-haiku-20241022","name":"Anthropic: Claude 3.5 Haiku (2024-10-22)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthracite-org/magnum-v4-72b","name":"Magnum v4 72B","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet","name":"Anthropic: Claude 3.5 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"mistralai/ministral-8b","name":"Mistral: Ministral 
8B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-3b","name":"Mistral: Ministral 3B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-7b-instruct","name":"Qwen2.5 7B Instruct","pricing":{"prompt":"0.00000004","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729036800,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-70b-instruct","name":"NVIDIA: Llama 3.1 Nemotron 70B Instruct","pricing":{"prompt":"0.0000006","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728950400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"inflection/inflection-3-productivity","name":"Inflection: Inflection 3 Productivity","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"inflection/inflection-3-pi","name":"Inflection: Inflection 3 Pi","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"google/gemini-flash-1.5-8b","name":"Google: Gemini 1.5 Flash 8B","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001","input_cache_write":"0.0000000583"},"created":1727913600,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"thedrummer/rocinante-12b","name":"TheDrummer: Rocinante 12B","pricing":{"prompt":"0.00000017","completion":"0.00000043","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthracite-org/magnum-v2-72b","name":"Magnum v2 72B","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct:free","name":"Meta: Llama 3.2 3B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct","name":"Meta: Llama 3.2 3B Instruct","pricing":{"prompt":"0.00000002","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct","name":"Meta: Llama 3.2 1B 
Instruct","pricing":{"prompt":"0.000000005","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-90b-vision-instruct","name":"Meta: Llama 3.2 90B Vision Instruct","pricing":{"prompt":"0.00000035","completion":"0.0000004","request":"0","image":"0.0005058","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct","name":"Meta: Llama 3.2 11B Vision Instruct","pricing":{"prompt":"0.000000049","completion":"0.000000049","request":"0","image":"0.00007948","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct:free","name":"Qwen2.5 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct","name":"Qwen2.5 72B Instruct","pricing":{"prompt":"0.00000007","completion":"0.00000026","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3.1-lumimaid-8b","name":"NeverSleep: Lumimaid v0.2 8B","pricing":{"prompt":"0.00000009","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726358400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o1-mini","name":"OpenAI: o1-mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"openai/o1-mini-2024-09-12","name":"OpenAI: o1-mini (2024-09-12)","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"mistralai/pixtral-12b","name":"Mistral: Pixtral 12B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1725926400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r-plus-08-2024","name":"Cohere: Command R+ (08-2024)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-08-2024","name":"Cohere: Command R (08-2024)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"qwen/qwen-2.5-vl-7b-instruct","name":"Qwen: Qwen2.5-VL 7B 
Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3.1-euryale-70b","name":"Sao10K: Llama 3.1 Euryale 70B v2.2","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3.5-mini-128k-instruct","name":"Microsoft: Phi-3.5 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724198400,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-70b","name":"Nous: Hermes 3 70B Instruct","pricing":{"prompt":"0.00000012","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723939200,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b","name":"Nous: Hermes 3 405B Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723766400,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/chatgpt-4o-latest","name":"OpenAI: ChatGPT-4o","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1723593600,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"sao10k/l3-lunaris-8b","name":"Sao10K: Llama 3 8B Lunaris","pricing":{"prompt":"0.00000004","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723507200,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-08-06","name":"OpenAI: GPT-4o (2024-08-06)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1722902400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b","name":"Meta: Llama 3.1 405B (base)","pricing":{"prompt":"0.000002","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722556800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct","name":"Meta: Llama 3.1 8B Instruct","pricing":{"prompt":"0.00000002","completion":"0.00000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct:free","name":"Meta: Llama 3.1 405B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct","name":"Meta: Llama 3.1 405B 
Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-70b-instruct","name":"Meta: Llama 3.1 70B Instruct","pricing":{"prompt":"0.0000001","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-nemo:free","name":"Mistral: Mistral Nemo (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":128000,"is_moderated":false}},{"id":"mistralai/mistral-nemo","name":"Mistral: Mistral Nemo","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/gpt-4o-mini","name":"OpenAI: GPT-4o-mini","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.000217","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-mini-2024-07-18","name":"OpenAI: GPT-4o-mini (2024-07-18)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-2-27b-it","name":"Google: Gemma 2 27B","pricing":{"prompt":"0.00000065","completion":"0.00000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1720828800,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-2-9b-it:free","name":"Google: Gemma 2 9B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-2-9b-it","name":"Google: Gemma 2 9B","pricing":{"prompt":"0.00000001","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet-20240620","name":"Anthropic: Claude 3.5 Sonnet (2024-06-20)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1718841600,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"sao10k/l3-euryale-70b","name":"Sao10k: Llama 3 Euryale 70B v2.1","pricing":{"prompt":"0.00000148","completion":"0.00000148","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1718668800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"nousresearch/hermes-2-pro-llama-3-8b","name":"NousResearch: Hermes 2 Pro - Llama-3 
8B","pricing":{"prompt":"0.000000025","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct:free","name":"Mistral: Mistral 7B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct","name":"Mistral: Mistral 7B Instruct","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.3","name":"Mistral: Mistral 7B Instruct v0.3","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"microsoft/phi-3-mini-128k-instruct","name":"Microsoft: Phi-3 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716681600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3-medium-128k-instruct","name":"Microsoft: Phi-3 Medium 128K Instruct","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716508800,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3-lumimaid-70b","name":"NeverSleep: Llama 3 Lumimaid 70B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715817600,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemini-flash-1.5","name":"Google: Gemini 1.5 Flash ","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0.00004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001875","input_cache_write":"0.0000001583"},"created":1715644800,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4o","name":"OpenAI: GPT-4o","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o:extended","name":"OpenAI: GPT-4o (extended)","pricing":{"prompt":"0.000006","completion":"0.000018","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"meta-llama/llama-guard-2-8b","name":"Meta: LlamaGuard 2 8B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-05-13","name":"OpenAI: 
GPT-4o (2024-05-13)","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"meta-llama/llama-3-8b-instruct","name":"Meta: Llama 3 8B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3-70b-instruct","name":"Meta: Llama 3 70B Instruct","pricing":{"prompt":"0.0000003","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mixtral-8x22b-instruct","name":"Mistral: Mixtral 8x22B Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713312000,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/wizardlm-2-8x22b","name":"WizardLM-2 8x22B","pricing":{"prompt":"0.00000048","completion":"0.00000048","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713225600,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-pro-1.5","name":"Google: Gemini 1.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.000005","request":"0","image":"0.0006575","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":2000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4-turbo","name":"OpenAI: GPT-4 Turbo","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0.01445","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-plus","name":"Cohere: Command R+","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1712188800,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-plus-04-2024","name":"Cohere: Command R+ (04-2024)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1712016000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command","name":"Cohere: Command","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1710374400,"top_provider":{"context_length":4096,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r","name":"Cohere: Command R","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1710374400,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"anthropic/claude-3-haiku","name":"Anthropic: Claude 3 
Haiku","pricing":{"prompt":"0.00000025","completion":"0.00000125","request":"0","image":"0.0004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000003","input_cache_write":"0.0000003"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-opus","name":"Anthropic: Claude 3 Opus","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-03-2024","name":"Cohere: Command R (03-2024)","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1709341200,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"mistralai/mistral-large","name":"Mistral Large","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1708905600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-0613","name":"OpenAI: GPT-3.5 Turbo (older v0613)","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4-turbo-preview","name":"OpenAI: GPT-4 Turbo Preview","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-small","name":"Mistral Small","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-tiny","name":"Mistral Tiny","pricing":{"prompt":"0.00000025","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mixtral-8x7b-instruct","name":"Mistral: Mixtral 8x7B Instruct","pricing":{"prompt":"0.0000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1702166400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/noromaid-20b","name":"Noromaid 20B","pricing":{"prompt":"0.000001","completion":"0.00000175","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700956800,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"alpindale/goliath-120b","name":"Goliath 120B","pricing":{"prompt":"0.000004","completion":"0.0000055","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699574400,"top_provider":{"context_length":6144,"max_completion_tokens":512,"is_moderated":false}},{"id":"openrouter/auto","name":"Auto 
Router","pricing":{"prompt":"-1","completion":"-1"},"created":1699401600,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-1106-preview","name":"OpenAI: GPT-4 Turbo (older v1106)","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699228800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo-instruct","name":"OpenAI: GPT-3.5 Turbo Instruct","pricing":{"prompt":"0.0000015","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-7b-instruct-v0.1","name":"Mistral: Mistral 7B Instruct v0.1","pricing":{"prompt":"0.00000011","completion":"0.00000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":2824,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-16k","name":"OpenAI: GPT-3.5 Turbo 16k","pricing":{"prompt":"0.000003","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1693180800,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mancer/weaver","name":"Mancer: Weaver (alpha)","pricing":{"prompt":"0.000001125","completion":"0.000001125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690934400,"top_provider":{"context_length":8000,"max_completion_tokens":2000,"is_moderated":false}},{"id":"undi95/remm-slerp-l2-13b","name":"ReMM SLERP 13B","pricing":{"prompt":"0.00000045","completion":"0.00000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1689984000,"top_provider":{"context_length":6144,"max_completion_tokens":null,"is_moderated":false}},{"id":"gryphe/mythomax-l2-13b","name":"MythoMax 13B","pricing":{"prompt":"0.00000006","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1688256000,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo","name":"OpenAI: GPT-3.5 Turbo","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4","name":"OpenAI: GPT-4","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4-0314","name":"OpenAI: GPT-4 (older v0314)","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}}]
\ No newline at end of file
+export const models = [{"id":"inclusionai/ling-1t","name":"inclusionAI: Ling-1T","pricing":{"prompt":"0.000001","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1760316076,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"nvidia/llama-3.3-nemotron-super-49b-v1.5","name":"NVIDIA: Llama 3.3 Nemotron Super 49B V1.5","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1760101395,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"baidu/ernie-4.5-21b-a3b-thinking","name":"Baidu: ERNIE 4.5 21B A3B Thinking","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1760048887,"top_provider":{"context_length":131072,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-2.5-flash-image","name":"Google: Gemini 2.5 Flash Image (Nano Banana)","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0"},"created":1759870431,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen3-vl-30b-a3b-thinking","name":"Qwen: Qwen3 VL 30B A3B Thinking","pricing":{"prompt":"0.00000029","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1759794479,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"qwen/qwen3-vl-30b-a3b-instruct","name":"Qwen: Qwen3 VL 30B A3B Instruct","pricing":{"prompt":"0.00000029","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1759794476,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"openai/gpt-5-pro","name":"OpenAI: GPT-5 Pro","pricing":{"prompt":"0.000015","completion":"0.00012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1759776663,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"z-ai/glm-4.6","name":"Z.AI: GLM 4.6","pricing":{"prompt":"0.0000005","completion":"0.00000175","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1759235576,"top_provider":{"context_length":202752,"max_completion_tokens":202752,"is_moderated":false}},{"id":"anthropic/claude-sonnet-4.5","name":"Anthropic: Claude Sonnet 4.5","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1759161676,"top_provider":{"context_length":1000000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"deepseek/deepseek-v3.2-exp","name":"DeepSeek: DeepSeek V3.2 Exp","pricing":{"prompt":"0.00000027","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1759150481,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/cydonia-24b-v4.1","name":"TheDrummer: Cydonia 24B V4.1","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758931878,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"relace/relace-apply-3","name":"Relace: Relace Apply 
3","pricing":{"prompt":"0.00000085","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758891572,"top_provider":{"context_length":256000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview-09-2025","name":"Google: Gemini 2.5 Flash Preview 09-2025","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075","input_cache_write":"0.0000003833"},"created":1758820178,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite-preview-09-2025","name":"Google: Gemini 2.5 Flash Lite Preview 09-2025","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758819686,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"qwen/qwen3-vl-235b-a22b-thinking","name":"Qwen: Qwen3 VL 235B A22B Thinking","pricing":{"prompt":"0.00000045","completion":"0.0000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758668690,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"qwen/qwen3-vl-235b-a22b-instruct","name":"Qwen: Qwen3 VL 235B A22B Instruct","pricing":{"prompt":"0.0000003","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758668687,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-max","name":"Qwen: Qwen3 Max","pricing":{"prompt":"0.0000012","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000024"},"created":1758662808,"top_provider":{"context_length":256000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-coder-plus","name":"Qwen: Qwen3 Coder Plus","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000001"},"created":1758662707,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":false}},{"id":"openai/gpt-5-codex","name":"OpenAI: GPT-5 Codex","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000125"},"created":1758643403,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"deepseek/deepseek-v3.1-terminus","name":"DeepSeek: DeepSeek V3.1 Terminus","pricing":{"prompt":"0.00000023","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758548275,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"x-ai/grok-4-fast","name":"xAI: Grok 4 Fast","pricing":{"prompt":"0.0000002","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000005"},"created":1758240090,"top_provider":{"context_length":2000000,"max_completion_tokens":30000,"is_moderated":false}},{"id":"alibaba/tongyi-deepresearch-30b-a3b:free","name":"Tongyi DeepResearch 30B A3B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758210804,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"alibaba/tongyi-deepresearch-30b-a3b","name":"Tongyi DeepResearch 30B A3B","pricing":{"prompt":"0.00000009","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758210804,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen3-coder-flash","name":"Qwen: Qwen3 Coder Flash","pricing":{"prompt":"0.0000003","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008"},"created":1758115536,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":false}},{"id":"arcee-ai/afm-4.5b","name":"Arcee AI: AFM 4.5B","pricing":{"prompt":"0.000000048","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758040484,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"opengvlab/internvl3-78b","name":"OpenGVLab: InternVL3 78B","pricing":{"prompt":"0.00000007","completion":"0.00000026","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757962555,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-thinking","name":"Qwen: Qwen3 Next 80B A3B Thinking","pricing":{"prompt":"0.00000014","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757612284,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-instruct","name":"Qwen: Qwen3 Next 80B A3B Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757612213,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"meituan/longcat-flash-chat:free","name":"Meituan: LongCat Flash Chat (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757427658,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"meituan/longcat-flash-chat","name":"Meituan: LongCat Flash Chat","pricing":{"prompt":"0.00000015","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757427658,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen-plus-2025-07-28","name":"Qwen: Qwen Plus 0728","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757347599,"top_provider":{"context_length":1000000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-plus-2025-07-28:thinking","name":"Qwen: Qwen Plus 0728 (thinking)","pricing":{"prompt":"0.0000004","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757347599,"top_provider":{"context_length":1000000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"nvidia/nemotron-nano-9b-v2:free","name":"NVIDIA: Nemotron Nano 9B V2 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757106807,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/nemotron-nano-9b-v2","name":"NVIDIA: Nemotron Nano 9B V2","pricing":{"prompt":"0.00000004","completion":"0.00000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757106807,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2-0905","name":"MoonshotAI: Kimi K2 0905","pricing":{"prompt":"0.00000039","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757021147,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-llama-109b-moe","name":"Cogito V2 Preview Llama 109B","pricing":{"prompt":"0.00000018","completion":"0.00000059","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756831568,"top_provider":{"context_length":32767,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-deepseek-671b","name":"Deep Cogito: Cogito V2 Preview Deepseek 671B","pricing":{"prompt":"0.00000125","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756830949,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"stepfun-ai/step3","name":"StepFun: Step3","pricing":{"prompt":"0.00000057","completion":"0.00000142","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756415375,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-thinking-2507","name":"Qwen: Qwen3 30B A3B Thinking 2507","pricing":{"prompt":"0.00000008","completion":"0.00000029","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756399192,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"x-ai/grok-code-fast-1","name":"xAI: Grok Code Fast 1","pricing":{"prompt":"0.0000002","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1756238927,"top_provider":{"context_length":256000,"max_completion_tokens":10000,"is_moderated":false}},{"id":"nousresearch/hermes-4-70b","name":"Nous: Hermes 4 70B","pricing":{"prompt":"0.00000011","completion":"0.00000038","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756236182,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"nousresearch/hermes-4-405b","name":"Nous: Hermes 4 405B","pricing":{"prompt":"0.0000003","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756235463,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"google/gemini-2.5-flash-image-preview","name":"Google: Gemini 2.5 Flash Image Preview (Nano Banana)","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0"},"created":1756218977,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3.1:free","name":"DeepSeek: DeepSeek V3.1 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755779628,"top_provider":{"context_length":163800,"max_completion_tokens":null,"is_moderated":true}},{"id":"deepseek/deepseek-chat-v3.1","name":"DeepSeek: DeepSeek V3.1","pricing":{"prompt":"0.0000002","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755779628,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"openai/gpt-4o-audio-preview","name":"OpenAI: GPT-4o Audio","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","audio":"0.00004","web_search":"0","internal_reasoning":"0"},"created":1755233061,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-medium-3.1","name":"Mistral: Mistral Medium 3.1","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755095639,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"baidu/ernie-4.5-21b-a3b","name":"Baidu: ERNIE 4.5 21B A3B","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755034167,"top_provider":{"context_length":120000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-28b-a3b","name":"Baidu: ERNIE 4.5 VL 28B A3B","pricing":{"prompt":"0.00000014","completion":"0.00000056","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755032836,"top_provider":{"context_length":30000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"z-ai/glm-4.5v","name":"Z.AI: GLM 4.5V","pricing":{"prompt":"0.0000006","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754922288,"top_provider":{"context_length":65536,"max_completion_tokens":16384,"is_moderated":false}},{"id":"ai21/jamba-mini-1.7","name":"AI21: Jamba Mini 1.7","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754670601,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-large-1.7","name":"AI21: Jamba Large 1.7","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754669020,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-5-chat","name":"OpenAI: GPT-5 Chat","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000125"},"created":1754587837,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-5","name":"OpenAI: GPT-5","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000125"},"created":1754587413,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-mini","name":"OpenAI: GPT-5 
Mini","pricing":{"prompt":"0.00000025","completion":"0.000002","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000025"},"created":1754587407,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-nano","name":"OpenAI: GPT-5 Nano","pricing":{"prompt":"0.00000005","completion":"0.0000004","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000005"},"created":1754587402,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-oss-120b","name":"OpenAI: gpt-oss-120b","pricing":{"prompt":"0.00000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414231,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/gpt-oss-20b:free","name":"OpenAI: gpt-oss-20b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414229,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/gpt-oss-20b","name":"OpenAI: gpt-oss-20b","pricing":{"prompt":"0.00000003","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414229,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-opus-4.1","name":"Anthropic: Claude Opus 4.1","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1754411591,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"mistralai/codestral-2508","name":"Mistral: Codestral 2508","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754079630,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder-30b-a3b-instruct","name":"Qwen: Qwen3 Coder 30B A3B Instruct","pricing":{"prompt":"0.00000006","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753972379,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-instruct-2507","name":"Qwen: Qwen3 30B A3B Instruct 2507","pricing":{"prompt":"0.00000008","completion":"0.00000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753806965,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"z-ai/glm-4.5","name":"Z.AI: GLM 4.5","pricing":{"prompt":"0.00000035","completion":"0.00000155","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471347,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"z-ai/glm-4.5-air:free","name":"Z.AI: GLM 4.5 Air (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"z-ai/glm-4.5-air","name":"Z.AI: GLM 4.5 
Air","pricing":{"prompt":"0.00000014","completion":"0.00000086","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-thinking-2507","name":"Qwen: Qwen3 235B A22B Thinking 2507","pricing":{"prompt":"0.00000011","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753449557,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"z-ai/glm-4-32b","name":"Z.AI: GLM 4 32B ","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753376617,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder:free","name":"Qwen: Qwen3 Coder 480B A35B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder","name":"Qwen: Qwen3 Coder 480B A35B","pricing":{"prompt":"0.00000022","completion":"0.00000095","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"bytedance/ui-tars-1.5-7b","name":"ByteDance: UI-TARS 7B ","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753205056,"top_provider":{"context_length":128000,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite","name":"Google: Gemini 2.5 Flash Lite","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1753200276,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-2507","name":"Qwen: Qwen3 235B A22B Instruct 2507","pricing":{"prompt":"0.00000008","completion":"0.00000055","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753119555,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"switchpoint/router","name":"Switchpoint Router","pricing":{"prompt":"0.00000085","completion":"0.0000034","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752272899,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2:free","name":"MoonshotAI: Kimi K2 0711 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752263252,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":true}},{"id":"moonshotai/kimi-k2","name":"MoonshotAI: Kimi K2 0711","pricing":{"prompt":"0.00000014","completion":"0.00000249","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752263252,"top_provider":{"context_length":63000,"max_completion_tokens":63000,"is_moderated":false}},{"id":"thudm/glm-4.1v-9b-thinking","name":"THUDM: GLM 4.1V 9B 
Thinking","pricing":{"prompt":"0.000000035","completion":"0.000000138","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752244385,"top_provider":{"context_length":65536,"max_completion_tokens":8000,"is_moderated":false}},{"id":"mistralai/devstral-medium","name":"Mistral: Devstral Medium","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752161321,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small","name":"Mistral: Devstral Small 1.1","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752160751,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mistral-24b-venice-edition:free","name":"Venice: Uncensored (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752094966,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-4","name":"xAI: Grok 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1752087689,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e2b-it:free","name":"Google: Gemma 3n 2B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752074904,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"tencent/hunyuan-a13b-instruct:free","name":"Tencent: Hunyuan A13B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751987664,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"tencent/hunyuan-a13b-instruct","name":"Tencent: Hunyuan A13B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751987664,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t2-chimera:free","name":"TNG: DeepSeek R1T2 Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751986985,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t2-chimera","name":"TNG: DeepSeek R1T2 Chimera","pricing":{"prompt":"0.0000003","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751986985,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"morph/morph-v3-large","name":"Morph: Morph V3 Large","pricing":{"prompt":"0.0000009","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751910858,"top_provider":{"context_length":81920,"max_completion_tokens":38000,"is_moderated":false}},{"id":"morph/morph-v3-fast","name":"Morph: Morph V3 
Fast","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751910002,"top_provider":{"context_length":81920,"max_completion_tokens":38000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-424b-a47b","name":"Baidu: ERNIE 4.5 VL 424B A47B ","pricing":{"prompt":"0.00000042","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300903,"top_provider":{"context_length":123000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"baidu/ernie-4.5-300b-a47b","name":"Baidu: ERNIE 4.5 300B A47B ","pricing":{"prompt":"0.00000028","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300139,"top_provider":{"context_length":123000,"max_completion_tokens":12000,"is_moderated":false}},{"id":"thedrummer/anubis-70b-v1.1","name":"TheDrummer: Anubis 70B V1.1","pricing":{"prompt":"0.00000065","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751208347,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"inception/mercury","name":"Inception: Mercury","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750973026,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct:free","name":"Mistral: Mistral Small 3.2 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct","name":"Mistral: Mistral Small 3.2 24B","pricing":{"prompt":"0.00000006","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"minimax/minimax-m1","name":"MiniMax: MiniMax M1","pricing":{"prompt":"0.0000004","completion":"0.0000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750200414,"top_provider":{"context_length":1000000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite-preview-06-17","name":"Google: Gemini 2.5 Flash Lite Preview 06-17","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","audio":"0.0000003","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1750173831,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash","name":"Google: Gemini 2.5 Flash","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075","input_cache_write":"0.0000003833"},"created":1750172488,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-pro","name":"Google: Gemini 2.5 
Pro","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1750169544,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b:free","name":"MoonshotAI: Kimi Dev 72B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b","name":"MoonshotAI: Kimi Dev 72B","pricing":{"prompt":"0.00000029","completion":"0.00000115","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/o3-pro","name":"OpenAI: o3 Pro","pricing":{"prompt":"0.00002","completion":"0.00008","request":"0","image":"0.0153","web_search":"0.01","internal_reasoning":"0"},"created":1749598352,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"x-ai/grok-3-mini","name":"xAI: Grok 3 Mini","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1749583245,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3","name":"xAI: Grok 3","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1749582908,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/magistral-small-2506","name":"Mistral: Magistral Small 2506","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749569561,"top_provider":{"context_length":40000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506","name":"Mistral: Magistral Medium 2506","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506:thinking","name":"Mistral: Magistral Medium 2506 (thinking)","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview","name":"Google: Gemini 2.5 Pro Preview 06-05","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1749137257,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b:free","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B","pricing":{"prompt":"0.00000003","completion":"0.00000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528:free","name":"DeepSeek: R1 0528 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528","name":"DeepSeek: R1 0528","pricing":{"prompt":"0.0000004","completion":"0.00000175","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"anthropic/claude-opus-4","name":"Anthropic: Claude Opus 4","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1747931245,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"anthropic/claude-sonnet-4","name":"Anthropic: Claude Sonnet 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1747930371,"top_provider":{"context_length":1000000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"mistralai/devstral-small-2505:free","name":"Mistral: Devstral Small 2505 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small-2505","name":"Mistral: Devstral Small 2505","pricing":{"prompt":"0.00000005","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it:free","name":"Google: Gemma 3n 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it","name":"Google: Gemma 3n 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/codex-mini","name":"OpenAI: Codex Mini","pricing":{"prompt":"0.0000015","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000375"},"created":1747409761,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"meta-llama/llama-3.3-8b-instruct:free","name":"Meta: Llama 3.3 8B Instruct 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747230154,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"nousresearch/deephermes-3-mistral-24b-preview","name":"Nous: DeepHermes 3 Mistral 24B Preview","pricing":{"prompt":"0.00000015","completion":"0.00000059","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746830904,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"mistralai/mistral-medium-3","name":"Mistral: Mistral Medium 3","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746627341,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview-05-06","name":"Google: Gemini 2.5 Pro Preview 05-06","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1746578513,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"arcee-ai/spotlight","name":"Arcee AI: Spotlight","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481552,"top_provider":{"context_length":131072,"max_completion_tokens":65537,"is_moderated":false}},{"id":"arcee-ai/maestro-reasoning","name":"Arcee AI: Maestro Reasoning","pricing":{"prompt":"0.0000009","completion":"0.0000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481269,"top_provider":{"context_length":131072,"max_completion_tokens":32000,"is_moderated":false}},{"id":"arcee-ai/virtuoso-large","name":"Arcee AI: Virtuoso Large","pricing":{"prompt":"0.00000075","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478885,"top_provider":{"context_length":131072,"max_completion_tokens":64000,"is_moderated":false}},{"id":"arcee-ai/coder-large","name":"Arcee AI: Coder Large","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478663,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4-reasoning-plus","name":"Microsoft: Phi 4 Reasoning Plus","pricing":{"prompt":"0.00000007","completion":"0.00000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746130961,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury-coder","name":"Inception: Mercury Coder","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746033880,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen3-4b:free","name":"Qwen: Qwen3 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746031104,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-prover-v2","name":"DeepSeek: DeepSeek Prover 
V2","pricing":{"prompt":"0.0000005","completion":"0.00000218","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746013094,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-4-12b","name":"Meta: Llama Guard 4 12B","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745975193,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b:free","name":"Qwen: Qwen3 30B A3B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b","name":"Qwen: Qwen3 30B A3B","pricing":{"prompt":"0.00000006","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b:free","name":"Qwen: Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b","name":"Qwen: Qwen3 8B","pricing":{"prompt":"0.000000035","completion":"0.000000138","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":128000,"max_completion_tokens":20000,"is_moderated":false}},{"id":"qwen/qwen3-14b:free","name":"Qwen: Qwen3 14B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-14b","name":"Qwen: Qwen3 14B","pricing":{"prompt":"0.00000005","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-32b","name":"Qwen: Qwen3 32B","pricing":{"prompt":"0.00000005","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875945,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b:free","name":"Qwen: Qwen3 235B A22B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b","name":"Qwen: Qwen3 235B A22B","pricing":{"prompt":"0.00000018","completion":"0.00000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera:free","name":"TNG: DeepSeek R1T Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera","name":"TNG: DeepSeek R1T 
Chimera","pricing":{"prompt":"0.0000003","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"microsoft/mai-ds-r1:free","name":"Microsoft: MAI DS R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745194100,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/mai-ds-r1","name":"Microsoft: MAI DS R1","pricing":{"prompt":"0.0000003","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745194100,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"thudm/glm-z1-32b","name":"THUDM: GLM Z1 32B","pricing":{"prompt":"0.00000005","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744924148,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"openai/o4-mini-high","name":"OpenAI: o4 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744824212,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o3","name":"OpenAI: o3","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0.00153","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744823457,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o4-mini","name":"OpenAI: o4 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744820942,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"shisa-ai/shisa-v2-llama3.3-70b:free","name":"Shisa AI: Shisa V2 Llama 3.3 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744754858,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"shisa-ai/shisa-v2-llama3.3-70b","name":"Shisa AI: Shisa V2 Llama 3.3 70B ","pricing":{"prompt":"0.00000005","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744754858,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen2.5-coder-7b-instruct","name":"Qwen: Qwen2.5 Coder 7B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744734887,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.1","name":"OpenAI: GPT-4.1","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744651385,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-mini","name":"OpenAI: GPT-4.1 
Mini","pricing":{"prompt":"0.0000004","completion":"0.0000016","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.0000001"},"created":1744651381,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-nano","name":"OpenAI: GPT-4.1 Nano","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000025"},"created":1744651369,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"eleutherai/llemma_7b","name":"EleutherAI: Llemma 7b","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744643225,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"alfredpros/codellama-7b-instruct-solidity","name":"AlfredPros: CodeLLaMa 7B Instruct Solidity","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744641874,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"arliai/qwq-32b-arliai-rpr-v1:free","name":"ArliAI: QwQ 32B RpR v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555982,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"arliai/qwq-32b-arliai-rpr-v1","name":"ArliAI: QwQ 32B RpR v1","pricing":{"prompt":"0.00000003","completion":"0.00000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555982,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"agentica-org/deepcoder-14b-preview:free","name":"Agentica: Deepcoder 14B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555395,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"agentica-org/deepcoder-14b-preview","name":"Agentica: Deepcoder 14B Preview","pricing":{"prompt":"0.000000015","completion":"0.000000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555395,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-mini-beta","name":"xAI: Grok 3 Mini Beta","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1744240195,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-beta","name":"xAI: Grok 3 Beta","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1744240068,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1","pricing":{"prompt":"0.0000006","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-maverick:free","name":"Meta: 
Llama 4 Maverick (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"meta-llama/llama-4-maverick","name":"Meta: Llama 4 Maverick","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006684","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":1048576,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-4-scout:free","name":"Meta: Llama 4 Scout (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"meta-llama/llama-4-scout","name":"Meta: Llama 4 Scout","pricing":{"prompt":"0.00000008","completion":"0.0000003","request":"0","image":"0.0003342","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":327680,"max_completion_tokens":16384,"is_moderated":false}},{"id":"allenai/molmo-7b-d","name":"AllenAI: Molmo 7B D","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743023247,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct:free","name":"Qwen: Qwen2.5 VL 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct","name":"Qwen: Qwen2.5 VL 32B Instruct","pricing":{"prompt":"0.00000005","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324:free","name":"DeepSeek: DeepSeek V3 0324 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324","name":"DeepSeek: DeepSeek V3 0324","pricing":{"prompt":"0.00000024","completion":"0.00000084","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"openai/o1-pro","name":"OpenAI: o1-pro","pricing":{"prompt":"0.00015","completion":"0.0006","request":"0","image":"0.21675","web_search":"0","internal_reasoning":"0"},"created":1742423211,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-3.1-24b-instruct:free","name":"Mistral: Mistral Small 3.1 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.1-24b-instruct","name":"Mistral: Mistral Small 3.1 
24B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"allenai/olmo-2-0325-32b-instruct","name":"AllenAI: Olmo 2 32B Instruct","pricing":{"prompt":"0.0000002","completion":"0.00000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741988556,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemma-3-4b-it:free","name":"Google: Gemma 3 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-4b-it","name":"Google: Gemma 3 4B","pricing":{"prompt":"0.00000001703012","completion":"0.0000000681536","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-12b-it:free","name":"Google: Gemma 3 12B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-12b-it","name":"Google: Gemma 3 12B","pricing":{"prompt":"0.00000003","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"cohere/command-a","name":"Cohere: Command A","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741894342,"top_provider":{"context_length":256000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"openai/gpt-4o-mini-search-preview","name":"OpenAI: GPT-4o-mini Search Preview","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0.0275","image":"0.000217","web_search":"0","internal_reasoning":"0"},"created":1741818122,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-search-preview","name":"OpenAI: GPT-4o Search Preview","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0.035","image":"0.003613","web_search":"0","internal_reasoning":"0"},"created":1741817949,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-3-27b-it:free","name":"Google: Gemma 3 27B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-27b-it","name":"Google: Gemma 3 27B","pricing":{"prompt":"0.00000009","completion":"0.00000016","request":"0","image":"0.0000256","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"thedrummer/skyfall-36b-v2","name":"TheDrummer: Skyfall 36B 
V2","pricing":{"prompt":"0.00000008","completion":"0.00000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741636566,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"microsoft/phi-4-multimodal-instruct","name":"Microsoft: Phi 4 Multimodal Instruct","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0.00017685","web_search":"0","internal_reasoning":"0"},"created":1741396284,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-reasoning-pro","name":"Perplexity: Sonar Reasoning Pro","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741313308,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-pro","name":"Perplexity: Sonar Pro","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741312423,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"perplexity/sonar-deep-research","name":"Perplexity: Sonar Deep Research","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0.000003"},"created":1741311246,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b","name":"Qwen: QwQ 32B","pricing":{"prompt":"0.00000015","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/deephermes-3-llama-3-8b-preview:free","name":"Nous: DeepHermes 3 Llama 3 8B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740719372,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/deephermes-3-llama-3-8b-preview","name":"Nous: DeepHermes 3 Llama 3 8B Preview","pricing":{"prompt":"0.00000003","completion":"0.00000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740719372,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"google/gemini-2.0-flash-lite-001","name":"Google: Gemini 2.0 Flash Lite","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740506212,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet","name":"Anthropic: Claude 3.7 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:thinking","name":"Anthropic: Claude 3.7 Sonnet 
(thinking)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"perplexity/r1-1776","name":"Perplexity: R1 1776","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740004929,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-saba","name":"Mistral: Saba","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739803239,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b:free","name":"Dolphin3.0 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b","name":"Dolphin3.0 Mistral 24B","pricing":{"prompt":"0.00000004","completion":"0.00000017","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"meta-llama/llama-guard-3-8b","name":"Llama Guard 3 8B","pricing":{"prompt":"0.00000002","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739401318,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-mini-high","name":"OpenAI: o3 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1739372611,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"google/gemini-2.0-flash-001","name":"Google: Gemini 2.0 Flash","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0.0000258","audio":"0.0000007","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1738769413,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-vl-plus","name":"Qwen: Qwen VL Plus","pricing":{"prompt":"0.00000021","completion":"0.00000063","request":"0","image":"0.0002688","web_search":"0","internal_reasoning":"0"},"created":1738731255,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"aion-labs/aion-1.0","name":"AionLabs: Aion-1.0","pricing":{"prompt":"0.000004","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697557,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-1.0-mini","name":"AionLabs: Aion-1.0-Mini","pricing":{"prompt":"0.0000007","completion":"0.0000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697107,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-rp-llama-3.1-8b","name":"AionLabs: Aion-RP 
1.0 (8B)","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738696718,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-vl-max","name":"Qwen: Qwen VL Max","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.001024","web_search":"0","internal_reasoning":"0"},"created":1738434304,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"qwen/qwen-turbo","name":"Qwen: Qwen-Turbo","pricing":{"prompt":"0.00000005","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1738410974,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct:free","name":"Qwen: Qwen2.5 VL 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct","name":"Qwen: Qwen2.5 VL 72B Instruct","pricing":{"prompt":"0.00000008","completion":"0.00000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-plus","name":"Qwen: Qwen-Plus","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000016"},"created":1738409840,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-max","name":"Qwen: Qwen-Max ","pricing":{"prompt":"0.0000016","completion":"0.0000064","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000064"},"created":1738402289,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/o3-mini","name":"OpenAI: o3 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1738351721,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-24b-instruct-2501:free","name":"Mistral: Mistral Small 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-24b-instruct-2501","name":"Mistral: Mistral Small 3","pricing":{"prompt":"0.00000005","completion":"0.00000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-32b","name":"DeepSeek: R1 Distill Qwen 32B","pricing":{"prompt":"0.00000027","completion":"0.00000027","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738194830,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b","name":"DeepSeek: R1 Distill Qwen 
14B","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738193940,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"perplexity/sonar-reasoning","name":"Perplexity: Sonar Reasoning","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738131107,"top_provider":{"context_length":127000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar","name":"Perplexity: Sonar","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738013808,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-7b","name":"Liquid: LFM 7B","pricing":{"prompt":"0.00000001","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806883,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-3b","name":"Liquid: LFM 3B","pricing":{"prompt":"0.00000002","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806501,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b:free","name":"DeepSeek: R1 Distill Llama 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b","name":"DeepSeek: R1 Distill Llama 70B","pricing":{"prompt":"0.00000003","completion":"0.00000013","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"deepseek/deepseek-r1:free","name":"DeepSeek: R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1","name":"DeepSeek: R1","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"minimax/minimax-01","name":"MiniMax: MiniMax-01","pricing":{"prompt":"0.0000002","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736915462,"top_provider":{"context_length":1000192,"max_completion_tokens":1000192,"is_moderated":false}},{"id":"mistralai/codestral-2501","name":"Mistral: Codestral 2501","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736895522,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4","name":"Microsoft: Phi 
4","pricing":{"prompt":"0.00000006","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736489872,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3.1-70b-hanami-x1","name":"Sao10K: Llama 3.1 70B Hanami x1","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736302854,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat","name":"DeepSeek: DeepSeek V3","pricing":{"prompt":"0.0000003","completion":"0.00000085","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"sao10k/l3.3-euryale-70b","name":"Sao10K: Llama 3.3 Euryale 70B","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734535928,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/o1","name":"OpenAI: o1","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0.021675","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000075"},"created":1734459999,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"cohere/command-r7b-12-2024","name":"Cohere: Command R7B (12-2024)","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734158152,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"google/gemini-2.0-flash-exp:free","name":"Google: Gemini 2.0 Flash Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733937523,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct:free","name":"Meta: Llama 3.3 70B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct","name":"Meta: Llama 3.3 70B Instruct","pricing":{"prompt":"0.00000013","completion":"0.00000039","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":120000,"is_moderated":false}},{"id":"amazon/nova-lite-v1","name":"Amazon: Nova Lite 1.0","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0.00009","web_search":"0","internal_reasoning":"0"},"created":1733437363,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-micro-v1","name":"Amazon: Nova Micro 1.0","pricing":{"prompt":"0.000000035","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733437237,"top_provider":{"context_length":128000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-pro-v1","name":"Amazon: Nova Pro 
1.0","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.0012","web_search":"0","internal_reasoning":"0"},"created":1733436303,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"openai/gpt-4o-2024-11-20","name":"OpenAI: GPT-4o (2024-11-20)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1732127594,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-large-2411","name":"Mistral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978685,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2407","name":"Mistral Large 2407","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978415,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/pixtral-large-2411","name":"Mistral: Pixtral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0.002888","web_search":"0","internal_reasoning":"0"},"created":1731977388,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct:free","name":"Qwen2.5 Coder 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct","name":"Qwen2.5 Coder 32B Instruct","pricing":{"prompt":"0.00000004","completion":"0.00000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"raifle/sorcererlm-8x22b","name":"SorcererLM 8x22B","pricing":{"prompt":"0.0000045","completion":"0.0000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731105083,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/unslopnemo-12b","name":"TheDrummer: UnslopNemo 12B","pricing":{"prompt":"0.0000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731103448,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku","name":"Anthropic: Claude 3.5 Haiku","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthropic/claude-3.5-haiku-20241022","name":"Anthropic: Claude 3.5 Haiku 
(2024-10-22)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet","name":"Anthropic: Claude 3.5 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthracite-org/magnum-v4-72b","name":"Magnum v4 72B","pricing":{"prompt":"0.0000025","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":2048,"is_moderated":false}},{"id":"mistralai/ministral-8b","name":"Mistral: Ministral 8B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-3b","name":"Mistral: Ministral 3B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-7b-instruct","name":"Qwen: Qwen2.5 7B Instruct","pricing":{"prompt":"0.00000004","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729036800,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-70b-instruct","name":"NVIDIA: Llama 3.1 Nemotron 70B Instruct","pricing":{"prompt":"0.0000006","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728950400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"inflection/inflection-3-productivity","name":"Inflection: Inflection 3 Productivity","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"inflection/inflection-3-pi","name":"Inflection: Inflection 3 Pi","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"thedrummer/rocinante-12b","name":"TheDrummer: Rocinante 12B","pricing":{"prompt":"0.00000017","completion":"0.00000043","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthracite-org/magnum-v2-72b","name":"Magnum v2 72B","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct:free","name":"Meta: Llama 3.2 3B 
Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct","name":"Meta: Llama 3.2 3B Instruct","pricing":{"prompt":"0.00000002","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct","name":"Meta: Llama 3.2 1B Instruct","pricing":{"prompt":"0.000000005","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct","name":"Meta: Llama 3.2 11B Vision Instruct","pricing":{"prompt":"0.000000049","completion":"0.000000049","request":"0","image":"0.00007948","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-90b-vision-instruct","name":"Meta: Llama 3.2 90B Vision Instruct","pricing":{"prompt":"0.00000035","completion":"0.0000004","request":"0","image":"0.0005058","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct:free","name":"Qwen2.5 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct","name":"Qwen2.5 72B Instruct","pricing":{"prompt":"0.00000007","completion":"0.00000026","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"neversleep/llama-3.1-lumimaid-8b","name":"NeverSleep: Lumimaid v0.2 8B","pricing":{"prompt":"0.00000009","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726358400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o1-mini","name":"OpenAI: o1-mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"openai/o1-mini-2024-09-12","name":"OpenAI: o1-mini (2024-09-12)","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"mistralai/pixtral-12b","name":"Mistral: Pixtral 12B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1725926400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r-08-2024","name":"Cohere: Command R 
(08-2024)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-plus-08-2024","name":"Cohere: Command R+ (08-2024)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"qwen/qwen-2.5-vl-7b-instruct","name":"Qwen: Qwen2.5-VL 7B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3.1-euryale-70b","name":"Sao10K: Llama 3.1 Euryale 70B v2.2","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3.5-mini-128k-instruct","name":"Microsoft: Phi-3.5 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724198400,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-70b","name":"Nous: Hermes 3 70B Instruct","pricing":{"prompt":"0.0000003","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723939200,"top_provider":{"context_length":65000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b","name":"Nous: Hermes 3 405B Instruct","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723766400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/chatgpt-4o-latest","name":"OpenAI: ChatGPT-4o","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1723593600,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"sao10k/l3-lunaris-8b","name":"Sao10K: Llama 3 8B Lunaris","pricing":{"prompt":"0.00000004","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723507200,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-08-06","name":"OpenAI: GPT-4o (2024-08-06)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1722902400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b","name":"Meta: Llama 3.1 405B (base)","pricing":{"prompt":"0.000004","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722556800,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct","name":"Meta: Llama 3.1 405B 
Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct","name":"Meta: Llama 3.1 8B Instruct","pricing":{"prompt":"0.00000002","completion":"0.00000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-70b-instruct","name":"Meta: Llama 3.1 70B Instruct","pricing":{"prompt":"0.0000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-nemo:free","name":"Mistral: Mistral Nemo (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":128000,"is_moderated":false}},{"id":"mistralai/mistral-nemo","name":"Mistral: Mistral Nemo","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/gpt-4o-mini-2024-07-18","name":"OpenAI: GPT-4o-mini (2024-07-18)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-mini","name":"OpenAI: GPT-4o-mini","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.000217","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-2-27b-it","name":"Google: Gemma 2 27B","pricing":{"prompt":"0.00000065","completion":"0.00000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1720828800,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-2-9b-it:free","name":"Google: Gemma 2 9B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-2-9b-it","name":"Google: Gemma 2 9B","pricing":{"prompt":"0.00000001","completion":"0.00000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet-20240620","name":"Anthropic: Claude 3.5 Sonnet (2024-06-20)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1718841600,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"sao10k/l3-euryale-70b","name":"Sao10k: Llama 3 Euryale 70B 
v2.1","pricing":{"prompt":"0.00000148","completion":"0.00000148","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1718668800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.3","name":"Mistral: Mistral 7B Instruct v0.3","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"nousresearch/hermes-2-pro-llama-3-8b","name":"NousResearch: Hermes 2 Pro - Llama-3 8B","pricing":{"prompt":"0.000000025","completion":"0.00000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct:free","name":"Mistral: Mistral 7B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct","name":"Mistral: Mistral 7B Instruct","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"microsoft/phi-3-mini-128k-instruct","name":"Microsoft: Phi-3 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716681600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3-medium-128k-instruct","name":"Microsoft: Phi-3 Medium 128K Instruct","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716508800,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o","name":"OpenAI: GPT-4o","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o:extended","name":"OpenAI: GPT-4o (extended)","pricing":{"prompt":"0.000006","completion":"0.000018","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"openai/gpt-4o-2024-05-13","name":"OpenAI: GPT-4o (2024-05-13)","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"meta-llama/llama-guard-2-8b","name":"Meta: LlamaGuard 2 8B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3-8b-instruct","name":"Meta: Llama 3 8B 
Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3-70b-instruct","name":"Meta: Llama 3 70B Instruct","pricing":{"prompt":"0.0000003","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mixtral-8x22b-instruct","name":"Mistral: Mixtral 8x22B Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713312000,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/wizardlm-2-8x22b","name":"WizardLM-2 8x22B","pricing":{"prompt":"0.00000048","completion":"0.00000048","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713225600,"top_provider":{"context_length":65536,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/gpt-4-turbo","name":"OpenAI: GPT-4 Turbo","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0.01445","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-haiku","name":"Anthropic: Claude 3 Haiku","pricing":{"prompt":"0.00000025","completion":"0.00000125","request":"0","image":"0.0004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000003","input_cache_write":"0.0000003"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-opus","name":"Anthropic: Claude 3 Opus","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-large","name":"Mistral Large","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1708905600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-0613","name":"OpenAI: GPT-3.5 Turbo (older v0613)","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4-turbo-preview","name":"OpenAI: GPT-4 Turbo Preview","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-tiny","name":"Mistral Tiny","pricing":{"prompt":"0.00000025","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small","name":"Mistral 
Small","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.2","name":"Mistral: Mistral 7B Instruct v0.2","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1703721600,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mixtral-8x7b-instruct","name":"Mistral: Mixtral 8x7B Instruct","pricing":{"prompt":"0.00000054","completion":"0.00000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1702166400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/noromaid-20b","name":"Noromaid 20B","pricing":{"prompt":"0.000001","completion":"0.00000175","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700956800,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"alpindale/goliath-120b","name":"Goliath 120B","pricing":{"prompt":"0.000004","completion":"0.0000055","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699574400,"top_provider":{"context_length":6144,"max_completion_tokens":512,"is_moderated":false}},{"id":"openrouter/auto","name":"Auto Router","pricing":{"prompt":"-1","completion":"-1"},"created":1699401600,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-1106-preview","name":"OpenAI: GPT-4 Turbo (older v1106)","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699228800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-7b-instruct-v0.1","name":"Mistral: Mistral 7B Instruct v0.1","pricing":{"prompt":"0.00000011","completion":"0.00000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":2824,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-instruct","name":"OpenAI: GPT-3.5 Turbo Instruct","pricing":{"prompt":"0.0000015","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo-16k","name":"OpenAI: GPT-3.5 Turbo 16k","pricing":{"prompt":"0.000003","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1693180800,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mancer/weaver","name":"Mancer: Weaver (alpha)","pricing":{"prompt":"0.000001125","completion":"0.000001125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690934400,"top_provider":{"context_length":8000,"max_completion_tokens":2000,"is_moderated":false}},{"id":"undi95/remm-slerp-l2-13b","name":"ReMM SLERP 
13B","pricing":{"prompt":"0.00000045","completion":"0.00000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1689984000,"top_provider":{"context_length":6144,"max_completion_tokens":null,"is_moderated":false}},{"id":"gryphe/mythomax-l2-13b","name":"MythoMax 13B","pricing":{"prompt":"0.00000005","completion":"0.00000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1688256000,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo","name":"OpenAI: GPT-3.5 Turbo","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4-0314","name":"OpenAI: GPT-4 (older v0314)","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4","name":"OpenAI: GPT-4","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}}]
\ No newline at end of file
diff --git a/packages/kbot/dist/main_node.js b/packages/kbot/dist/main_node.js
index d2d079bf..61a7510d 100644
--- a/packages/kbot/dist/main_node.js
+++ b/packages/kbot/dist/main_node.js
@@ -321287,17 +321287,12 @@ const main_createImage = async (prompt, options) => {
}
}
else if ('text' in part && part.text) {
- // Check if this is a rejection message
- const text = part.text.toLowerCase();
- if (text.includes('cannot fulfill') || text.includes('not able to create') ||
- text.includes('unable to generate') || text.includes('cannot generate') ||
- text.includes('cannot create') || text.includes('not appropriate')) {
- main_dist_in/* logger */.vF.error('Google AI rejected the request:', {
- rejectionMessage: part.text,
- finishReason: candidate.finishReason
- });
- throw new Error(`Request rejected by Google AI: ${part.text}`);
- }
+ // Google AI returned text instead of an image - show this to the user
+ main_dist_in/* logger */.vF.info('Google AI returned text response instead of image:', {
+ textResponse: part.text,
+ finishReason: candidate.finishReason
+ });
+ throw new Error(`Google AI response: ${part.text}`);
}
}
main_dist_in/* logger */.vF.warn('No image data found in API response parts', {
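For orientation, the hunk above reduces to the following behaviour, shown here as a hedged standalone TypeScript sketch rather than the shipped code (the bundle mangles the real identifiers, e.g. main_dist_in/* logger */.vF; the ResponsePart/Candidate types, the log stand-in and the extractImage helper below are illustrative assumptions, not the project's actual API):

// Sketch of the new part handling: return inline image bytes when present,
// and surface any text part to the caller verbatim instead of pattern-matching
// it against a fixed list of rejection phrases.
interface ResponsePart {
    inlineData?: { data: string; mimeType?: string };
    text?: string;
}

interface Candidate {
    finishReason?: string;
    content?: { parts: ResponsePart[] };
}

// Minimal logger stand-in; the real code logs through the bundled logger module.
const log = {
    info: (msg: string, meta?: unknown) => console.info(msg, meta ?? ''),
    warn: (msg: string, meta?: unknown) => console.warn(msg, meta ?? ''),
};

function extractImage(candidate: Candidate): Buffer {
    for (const part of candidate.content?.parts ?? []) {
        if (part.inlineData?.data) {
            // Image payloads arrive base64-encoded in inlineData.
            return Buffer.from(part.inlineData.data, 'base64');
        }
        if (part.text) {
            // Google AI answered with text instead of an image: show it to the user.
            log.info('Google AI returned text response instead of image:', {
                textResponse: part.text,
                finishReason: candidate.finishReason,
            });
            throw new Error(`Google AI response: ${part.text}`);
        }
    }
    log.warn('No image data found in API response parts');
    throw new Error('No image data found in API response parts');
}

The editImage hunk further down makes the same switch; per the diff it only adds a truncated prompt preview and the image count to the info log before throwing.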
@@ -321389,19 +321384,14 @@ const main_editImage = async (prompt, imagePaths, options) => {
}
}
else if ('text' in part && part.text) {
- // Check if this is a rejection message
- const text = part.text.toLowerCase();
- if (text.includes('cannot fulfill') || text.includes('not able to create') ||
- text.includes('unable to generate') || text.includes('cannot generate') ||
- text.includes('cannot create') || text.includes('not appropriate')) {
- main_dist_in/* logger */.vF.error('Google AI rejected the image edit request:', {
- rejectionMessage: part.text,
- finishReason: candidate.finishReason,
- prompt: prompt.substring(0, 100) + '...',
- imageCount: imagePaths.length
- });
- throw new Error(`Request rejected by Google AI: ${part.text}`);
- }
+ // Google AI returned text instead of an image - show this to the user
+ main_dist_in/* logger */.vF.info('Google AI returned text response instead of image (editImage):', {
+ textResponse: part.text,
+ finishReason: candidate.finishReason,
+ prompt: prompt.substring(0, 100) + '...',
+ imageCount: imagePaths.length
+ });
+ throw new Error(`Google AI response: ${part.text}`);
}
}
main_dist_in/* logger */.vF.warn('No image data found in API response parts (editImage)', {
@@ -321424,7 +321414,7 @@ const main_editImage = async (prompt, imagePaths, options) => {
throw error; // Re-throw to let the caller handle it
}
};
-//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"images-google.js","sourceRoot":"","sources":["../../src/lib/images-google.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAQ,MAAM,uBAAuB,CAAC;AACjE,OAAO,KAAK,EAAE,MAAM,SAAS,CAAC;AAE9B,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACrC,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAEnC,MAAM,uBAAuB,GAAG,CAAC,OAAqB,EAAE,EAAE;IACtD,MAAM,MAAM,GAAG,UAAU,CAAC,OAAO,CAAC,CAAC;IACnC,IAAI,CAAC,MAAM,EAAE,CAAC;QACV,MAAM,CAAC,KAAK,CACR,8CAA8C;YAC9C,wEAAwE,CAC3E,CAAC;QACF,OAAO,SAAS,CAAC;IACrB,CAAC;IAED,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,IAAI,MAAM,EAAE,MAAM,EAAE,GAAG,CAAC;IAEpD,IAAI,CAAC,MAAM,EAAE,CAAC;QACV,MAAM,CAAC,KAAK,CAAC,qGAAqG,CAAC,CAAC;QACpH,OAAO,SAAS,CAAC;IACrB,CAAC;IAED,OAAO,IAAI,kBAAkB,CAAC,MAAM,CAAC,CAAC;AAC1C,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,WAAW,GAAG,KAAK,EAAE,MAAc,EAAE,OAAqB,EAA0B,EAAE;IAC/F,MAAM,EAAE,GAAG,uBAAuB,CAAC,OAAO,CAAC,CAAC;IAC5C,IAAI,CAAC,EAAE,EAAE,CAAC;QACN,OAAO,IAAI,CAAC;IAChB,CAAC;IAED,MAAM,KAAK,GAAG,EAAE,CAAC,kBAAkB,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,IAAI,gCAAgC,EAAE,CAAC,CAAC;IAElG,IAAI,CAAC;QACD,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;QAEnD,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;QACjC,MAAM,CAAC,KAAK,CAAC,mCAAmC,EAAE;YAC9C,WAAW,EAAE,CAAC,CAAC,QAAQ;YACvB,aAAa,EAAE,CAAC,CAAC,QAAQ,EAAE,UAAU;YACrC,gBAAgB,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM;YAC9C,YAAY,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;SAClD,CAAC,CAAC;QAEH,IAAI,CAAC,QAAQ,IAAI,CAAC,QAAQ,CAAC,UAAU,IAAI,QAAQ,CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxE,MAAM,CAAC,KAAK,CAAC,sDAAsD,EAAE;gBACjE,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;aAC9C,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,2HAA2H,CAAC,CAAC;QACjJ,CAAC;QAED,MAAM,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QAEzC,2CAA2C;QAC3C,IAAI,SAAS,CAAC,YAAY,IAAI,SAAS,CAAC,YAAY,KAAK,MAAM,EAAE,CAAC;YAC9D,MAAM,oBAAoB,GAAG;gBACzB,cAAc,EAAE,wHAAwH;gBACxI,QAAQ,EAAE,yGAAyG;gBACnH,YAAY,EAAE,wHAAwH;gBACtI,OAAO,EAAE,gEAAgE;aAC5E,CAAC;YAEF,MAAM,OAAO,GAAG,oBAAoB,CAAC,SAAS,CAAC,YAAY,CAAC;gBAC7C,uCAAuC,SAAS,CAAC,YAAY,EAAE,CAAC;YAE/E,MAAM,CAAC,KAAK,CAAC,kDAAkD,EAAE;gBAC7D,YAAY,EAAE,SAAS,CAAC,YAAY;gBACpC,gBAAgB,EAAE,OAAO;gBACzB,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;aAChD,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,iCAAiC,OAAO,EAAE,CAAC,CAAC;QAChE,CAAC;QAED,IAAI,CAAC,SAAS,CAAC,OAAO,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;YACjD,MAAM,CAAC,KAAK,CAAC,sDAAsD,EAAE;gBACjE,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;aAChD,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;QAC/F,CAAC;QAED,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC;QACtC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACvB,IAAI,YAAY,IAAI,IAAI,EAAE,CAAC;gBACvB,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,CAAC;gBACnC,IAAI,UAAU,EAAE,CAAC;oBACb,OAAO,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;gBAClD,CAAC;YACL,CAAC;iBAAM,IAAI,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;gBACrC,uCAAuC;gBACvC,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC;gBACrC,IAAI,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,oBAAoB,CAAC;oBACtE,IAAI,CAAC,QAAQ,CAAC,oBAAoB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,iBAAiB,CAAC;oBACvE,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,iBAAiB,CAAC,EAAE,CAAC;oBACrE,MAAM,CAAC,KAAK,CAAC,iCAAiC,EAAE;wBAC5C,gBAAgB,EAAE,IAAI,CAAC,IAAI;wBAC3B,YAAY,EAAE,SAAS,CAAC,YAAY;qBACvC,CAAC,CAAC;oBACH,MAAM,IAAI,KAAK,CAAC,kCAAkC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;gBACnE,CAAC;YACL,CAAC;QACL,CAAC;QAED,MAAM,CAAC,IAAI,CAAC,2CAA2C,EAAE;YACrD,UAAU,EAAE,KAAK,CAAC,MAAM;YACxB,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,IAAI,EAAE,CAA
C,CAAC;YACrC,YAAY,EAAE,SAAS,CAAC,YAAY;SACvC,CAAC,CAAC;QACH,MAAM,IAAI,KAAK,CAAC,iFAAiF,CAAC,CAAC;IAEvG,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACb,MAAM,CAAC,KAAK,CAAC,qCAAqC,EAAE;YAChD,KAAK,EAAE,KAAK,CAAC,OAAO;YACpB,KAAK,EAAE,KAAK,CAAC,KAAK;YAClB,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;SAC3C,CAAC,CAAC;QACH,MAAM,KAAK,CAAC,CAAC,uCAAuC;IACxD,CAAC;AACL,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,SAAS,GAAG,KAAK,EAAE,MAAc,EAAE,UAAoB,EAAE,OAAqB,EAA0B,EAAE;IACnH,MAAM,EAAE,GAAG,uBAAuB,CAAC,OAAO,CAAC,CAAC;IAC5C,IAAI,CAAC,EAAE,EAAE,CAAC;QACN,OAAO,IAAI,CAAC;IAChB,CAAC;IAED,MAAM,KAAK,GAAG,EAAE,CAAC,kBAAkB,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,IAAI,gCAAgC,EAAE,CAAC,CAAC;IAElG,IAAI,CAAC;QACD,MAAM,UAAU,GAAW,UAAU,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE;YAClD,MAAM,SAAS,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC7C,MAAM,WAAW,GAAG,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;YACjD,MAAM,QAAQ,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,WAAW,CAAC;YAClD,OAAO;gBACH,UAAU,EAAE;oBACR,QAAQ;oBACR,IAAI,EAAE,WAAW;iBACpB;aACJ,CAAC;QACN,CAAC,CAAC,CAAC;QAEH,MAAM,QAAQ,GAAS,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC;QACxC,MAAM,WAAW,GAAG,CAAC,GAAG,UAAU,EAAE,QAAQ,CAAC,CAAC;QAE9C,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAExD,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;QACjC,MAAM,CAAC,KAAK,CAAC,+CAA+C,EAAE;YAC1D,WAAW,EAAE,CAAC,CAAC,QAAQ;YACvB,aAAa,EAAE,CAAC,CAAC,QAAQ,EAAE,UAAU;YACrC,gBAAgB,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM;YAC9C,YAAY,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;SAClD,CAAC,CAAC;QAEH,IAAI,CAAC,QAAQ,IAAI,CAAC,QAAQ,CAAC,UAAU,IAAI,QAAQ,CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxE,MAAM,CAAC,KAAK,CAAC,kEAAkE,EAAE;gBAC7E,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC3C,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gBACxC,UAAU,EAAE,UAAU,CAAC,MAAM;aAChC,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,2HAA2H,CAAC,CAAC;QACjJ,CAAC;QAED,MAAM,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QAEzC,2CAA2C;QAC3C,IAAI,SAAS,CAAC,YAAY,IAAI,SAAS,CAAC,YAAY,KAAK,MAAM,EAAE,CAAC;YAC9D,MAAM,oBAAoB,GAAG;gBACzB,cAAc,EAAE,wHAAwH;gBACxI,QAAQ,EAAE,yGAAyG;gBACnH,YAAY,EAAE,wHAAwH;gBACtI,OAAO,EAAE,gEAAgE;aAC5E,CAAC;YAEF,MAAM,OAAO,GAAG,oBAAoB,CAAC,SAAS,CAAC,YAAY,CAAC;gBAC7C,uCAAuC,SAAS,CAAC,YAAY,EAAE,CAAC;YAE/E,MAAM,CAAC,KAAK,CAAC,qDAAqD,EAAE;gBAChE,YAAY,EAAE,SAAS,CAAC,YAAY;gBACpC,gBAAgB,EAAE,OAAO;gBACzB,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC7C,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gBACxC,UAAU,EAAE,UAAU,CAAC,MAAM;aAChC,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,iCAAiC,OAAO,EAAE,CAAC,CAAC;QAChE,CAAC;QAED,IAAI,CAAC,SAAS,CAAC,OAAO,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;YACjD,MAAM,CAAC,KAAK,CAAC,kEAAkE,EAAE;gBAC7E,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC7C,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gBACxC,UAAU,EAAE,UAAU,CAAC,MAAM;aAChC,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;QAC/F,CAAC;QAED,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC;QACtC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACvB,IAAI,YAAY,IAAI,IAAI,EAAE,CAAC;gBACvB,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,CAAC;gBACnC,IAAI,UAAU,EAAE,CAAC;oBACb,OAAO,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;gBAClD,CAAC;YACL,CAAC;iBAAM,IAAI,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;gBACrC,uCAAuC;gBACvC,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC;gBACrC,IAAI,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,oBAAoB,CAAC;oBACtE,IAAI,CAAC,QAAQ,CAAC,oBAAoB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,iBAAiB,CAAC;oBACvE,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,iBAAiB,CAAC,EAAE,CAAC;oBACrE,MAAM,CAAC,KAAK,CAAC,4CAA4C,EAAE;wBACvD,gBAAgB,EAAE
,IAAI,CAAC,IAAI;wBAC3B,YAAY,EAAE,SAAS,CAAC,YAAY;wBACpC,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;wBACxC,UAAU,EAAE,UAAU,CAAC,MAAM;qBAChC,CAAC,CAAC;oBACH,MAAM,IAAI,KAAK,CAAC,kCAAkC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;gBACnE,CAAC;YACL,CAAC;QACL,CAAC;QAED,MAAM,CAAC,IAAI,CAAC,uDAAuD,EAAE;YACjE,UAAU,EAAE,KAAK,CAAC,MAAM;YACxB,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;YACrC,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;YACxC,UAAU,EAAE,UAAU,CAAC,MAAM;YAC7B,YAAY,EAAE,SAAS,CAAC,YAAY;SACvC,CAAC,CAAC;QACH,MAAM,IAAI,KAAK,CAAC,iFAAiF,CAAC,CAAC;IAEvG,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACb,MAAM,CAAC,KAAK,CAAC,mCAAmC,EAAE;YAC9C,KAAK,EAAE,KAAK,CAAC,OAAO;YACpB,KAAK,EAAE,KAAK,CAAC,KAAK;YAClB,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;YACxC,UAAU,EAAE,UAAU,CAAC,MAAM;YAC7B,UAAU,EAAE,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,CAAC;SAC1D,CAAC,CAAC;QACH,MAAM,KAAK,CAAC,CAAC,uCAAuC;IACxD,CAAC;AACL,CAAC,CAAA"}
+//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"images-google.js","sourceRoot":"","sources":["../../src/lib/images-google.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAQ,MAAM,uBAAuB,CAAC;AACjE,OAAO,KAAK,EAAE,MAAM,SAAS,CAAC;AAE9B,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACrC,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAEnC,MAAM,uBAAuB,GAAG,CAAC,OAAqB,EAAE,EAAE;IACtD,MAAM,MAAM,GAAG,UAAU,CAAC,OAAO,CAAC,CAAC;IACnC,IAAI,CAAC,MAAM,EAAE,CAAC;QACV,MAAM,CAAC,KAAK,CACR,8CAA8C;YAC9C,wEAAwE,CAC3E,CAAC;QACF,OAAO,SAAS,CAAC;IACrB,CAAC;IAED,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,IAAI,MAAM,EAAE,MAAM,EAAE,GAAG,CAAC;IAEpD,IAAI,CAAC,MAAM,EAAE,CAAC;QACV,MAAM,CAAC,KAAK,CAAC,qGAAqG,CAAC,CAAC;QACpH,OAAO,SAAS,CAAC;IACrB,CAAC;IAED,OAAO,IAAI,kBAAkB,CAAC,MAAM,CAAC,CAAC;AAC1C,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,WAAW,GAAG,KAAK,EAAE,MAAc,EAAE,OAAqB,EAA0B,EAAE;IAC/F,MAAM,EAAE,GAAG,uBAAuB,CAAC,OAAO,CAAC,CAAC;IAC5C,IAAI,CAAC,EAAE,EAAE,CAAC;QACN,OAAO,IAAI,CAAC;IAChB,CAAC;IAED,MAAM,KAAK,GAAG,EAAE,CAAC,kBAAkB,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,IAAI,gCAAgC,EAAE,CAAC,CAAC;IAElG,IAAI,CAAC;QACD,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;QAEnD,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;QACjC,MAAM,CAAC,KAAK,CAAC,mCAAmC,EAAE;YAC9C,WAAW,EAAE,CAAC,CAAC,QAAQ;YACvB,aAAa,EAAE,CAAC,CAAC,QAAQ,EAAE,UAAU;YACrC,gBAAgB,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM;YAC9C,YAAY,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;SAClD,CAAC,CAAC;QAEH,IAAI,CAAC,QAAQ,IAAI,CAAC,QAAQ,CAAC,UAAU,IAAI,QAAQ,CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxE,MAAM,CAAC,KAAK,CAAC,sDAAsD,EAAE;gBACjE,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;aAC9C,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,2HAA2H,CAAC,CAAC;QACjJ,CAAC;QAED,MAAM,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QAEzC,2CAA2C;QAC3C,IAAI,SAAS,CAAC,YAAY,IAAI,SAAS,CAAC,YAAY,KAAK,MAAM,EAAE,CAAC;YAC9D,MAAM,oBAAoB,GAAG;gBACzB,cAAc,EAAE,wHAAwH;gBACxI,QAAQ,EAAE,yGAAyG;gBACnH,YAAY,EAAE,wHAAwH;gBACtI,OAAO,EAAE,gEAAgE;aAC5E,CAAC;YAEF,MAAM,OAAO,GAAG,oBAAoB,CAAC,SAAS,CAAC,YAAY,CAAC;gBAC7C,uCAAuC,SAAS,CAAC,YAAY,EAAE,CAAC;YAE/E,MAAM,CAAC,KAAK,CAAC,kDAAkD,EAAE;gBAC7D,YAAY,EAAE,SAAS,CAAC,YAAY;gBACpC,gBAAgB,EAAE,OAAO;gBACzB,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;aAChD,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,iCAAiC,OAAO,EAAE,CAAC,CAAC;QAChE,CAAC;QAED,IAAI,CAAC,SAAS,CAAC,OAAO,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;YACjD,MAAM,CAAC,KAAK,CAAC,sDAAsD,EAAE;gBACjE,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;aAChD,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;QAC/F,CAAC;QAED,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC;QACtC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACvB,IAAI,YAAY,IAAI,IAAI,EAAE,CAAC;gBACvB,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,CAAC;gBACnC,IAAI,UAAU,EAAE,CAAC;oBACb,OAAO,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;gBAClD,CAAC;YACL,CAAC;iBAAM,IAAI,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;gBACrC,sEAAsE;gBACtE,MAAM,CAAC,IAAI,CAAC,oDAAoD,EAAE;oBAC9D,YAAY,EAAE,IAAI,CAAC,IAAI;oBACvB,YAAY,EAAE,SAAS,CAAC,YAAY;iBACvC,CAAC,CAAC;gBACH,MAAM,IAAI,KAAK,CAAC,uBAAuB,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YACxD,CAAC;QACL,CAAC;QAED,MAAM,CAAC,IAAI,CAAC,2CAA2C,EAAE;YACrD,UAAU,EAAE,KAAK,CAAC,MAAM;YACxB,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;YACrC,YAAY,EAAE,SAAS,CAAC,YAAY;SACvC,CAAC,CAAC;QACH,MAAM,IAAI,KAAK,CAAC,iFAAiF,CAAC,CAAC;IAEvG,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACb,MAAM,CAAC,KAAK,CAAC,qCAAqC,EAAE;YAChD,KAAK,EAAE,KAAK,CAAC,OAAO;YACpB,KAAK,EAAE,KAAK,CAAC,KAAK;YAClB,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;SAC3C,CAAC,CAA
C;QACH,MAAM,KAAK,CAAC,CAAC,uCAAuC;IACxD,CAAC;AACL,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,SAAS,GAAG,KAAK,EAAE,MAAc,EAAE,UAAoB,EAAE,OAAqB,EAA0B,EAAE;IACnH,MAAM,EAAE,GAAG,uBAAuB,CAAC,OAAO,CAAC,CAAC;IAC5C,IAAI,CAAC,EAAE,EAAE,CAAC;QACN,OAAO,IAAI,CAAC;IAChB,CAAC;IAED,MAAM,KAAK,GAAG,EAAE,CAAC,kBAAkB,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,IAAI,gCAAgC,EAAE,CAAC,CAAC;IAElG,IAAI,CAAC;QACD,MAAM,UAAU,GAAW,UAAU,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE;YAClD,MAAM,SAAS,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC7C,MAAM,WAAW,GAAG,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;YACjD,MAAM,QAAQ,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,WAAW,CAAC;YAClD,OAAO;gBACH,UAAU,EAAE;oBACR,QAAQ;oBACR,IAAI,EAAE,WAAW;iBACpB;aACJ,CAAC;QACN,CAAC,CAAC,CAAC;QAEH,MAAM,QAAQ,GAAS,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC;QACxC,MAAM,WAAW,GAAG,CAAC,GAAG,UAAU,EAAE,QAAQ,CAAC,CAAC;QAE9C,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAExD,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;QACjC,MAAM,CAAC,KAAK,CAAC,+CAA+C,EAAE;YAC1D,WAAW,EAAE,CAAC,CAAC,QAAQ;YACvB,aAAa,EAAE,CAAC,CAAC,QAAQ,EAAE,UAAU;YACrC,gBAAgB,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM;YAC9C,YAAY,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;SAClD,CAAC,CAAC;QAEH,IAAI,CAAC,QAAQ,IAAI,CAAC,QAAQ,CAAC,UAAU,IAAI,QAAQ,CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxE,MAAM,CAAC,KAAK,CAAC,kEAAkE,EAAE;gBAC7E,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC3C,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gBACxC,UAAU,EAAE,UAAU,CAAC,MAAM;aAChC,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,2HAA2H,CAAC,CAAC;QACjJ,CAAC;QAED,MAAM,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QAEzC,2CAA2C;QAC3C,IAAI,SAAS,CAAC,YAAY,IAAI,SAAS,CAAC,YAAY,KAAK,MAAM,EAAE,CAAC;YAC9D,MAAM,oBAAoB,GAAG;gBACzB,cAAc,EAAE,wHAAwH;gBACxI,QAAQ,EAAE,yGAAyG;gBACnH,YAAY,EAAE,wHAAwH;gBACtI,OAAO,EAAE,gEAAgE;aAC5E,CAAC;YAEF,MAAM,OAAO,GAAG,oBAAoB,CAAC,SAAS,CAAC,YAAY,CAAC;gBAC7C,uCAAuC,SAAS,CAAC,YAAY,EAAE,CAAC;YAE/E,MAAM,CAAC,KAAK,CAAC,qDAAqD,EAAE;gBAChE,YAAY,EAAE,SAAS,CAAC,YAAY;gBACpC,gBAAgB,EAAE,OAAO;gBACzB,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC7C,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gBACxC,UAAU,EAAE,UAAU,CAAC,MAAM;aAChC,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,iCAAiC,OAAO,EAAE,CAAC,CAAC;QAChE,CAAC;QAED,IAAI,CAAC,SAAS,CAAC,OAAO,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;YACjD,MAAM,CAAC,KAAK,CAAC,kEAAkE,EAAE;gBAC7E,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC7C,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gBACxC,UAAU,EAAE,UAAU,CAAC,MAAM;aAChC,CAAC,CAAC;YACH,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;QAC/F,CAAC;QAED,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC;QACtC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACvB,IAAI,YAAY,IAAI,IAAI,EAAE,CAAC;gBACvB,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,CAAC;gBACnC,IAAI,UAAU,EAAE,CAAC;oBACb,OAAO,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;gBAClD,CAAC;YACL,CAAC;iBAAM,IAAI,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;gBACrC,sEAAsE;gBACtE,MAAM,CAAC,IAAI,CAAC,gEAAgE,EAAE;oBAC1E,YAAY,EAAE,IAAI,CAAC,IAAI;oBACvB,YAAY,EAAE,SAAS,CAAC,YAAY;oBACpC,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;oBACxC,UAAU,EAAE,UAAU,CAAC,MAAM;iBAChC,CAAC,CAAC;gBACH,MAAM,IAAI,KAAK,CAAC,uBAAuB,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YACxD,CAAC;QACL,CAAC;QAED,MAAM,CAAC,IAAI,CAAC,uDAAuD,EAAE;YACjE,UAAU,EAAE,KAAK,CAAC,MAAM;YACxB,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;YACrC,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;YACxC,UAAU,EAAE,UAAU,CAAC,MAAM;YAC7B,YAAY,EAAE,SAAS,CAAC,YAAY;SACvC,CAAC,CAAC;QACH,MAAM,IAAI,KAAK,CAAC,iFAAiF,CAAC,CAAC;IAEvG,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACb,MA
AM,CAAC,KAAK,CAAC,mCAAmC,EAAE;YAC9C,KAAK,EAAE,KAAK,CAAC,OAAO;YACpB,KAAK,EAAE,KAAK,CAAC,KAAK;YAClB,MAAM,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;YACxC,UAAU,EAAE,UAAU,CAAC,MAAM;YAC7B,UAAU,EAAE,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,CAAC;SAC1D,CAAC,CAAC;QACH,MAAM,KAAK,CAAC,CAAC,uCAAuC;IACxD,CAAC;AACL,CAAC,CAAA"}
// EXTERNAL MODULE: ./dist-in/prompt.js + 6 modules
var main_dist_in_prompt = __webpack_require__(31321);
// EXTERNAL MODULE: external "node:child_process"
@@ -321916,7 +321906,7 @@ const main_imageCommand = async (argv) => {
logger.error('Failed to parse options or generate image:', error.message, error.issues, error.stack);
}
};
-//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"images.js","sourceRoot":"","sources":["../../src/commands/images.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,KAAK,IAAI,MAAM,WAAW,CAAC;AAClC,OAAO,EAAE,IAAI,IAAI,KAAK,EAAE,MAAM,oBAAoB,CAAC;AACnD,OAAO,EAAE,IAAI,IAAI,MAAM,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EACH,YAAY,EACZ,QAAQ,EACR,UAAU,EACb,MAAM,SAAS,CAAC;AACjB,OAAO,EAAW,MAAM,EAAE,MAAM,OAAO,CAAC;AACxC,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAC5C,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAE5C,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAC;AAE9D,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AACjE,OAAO,EAAE,MAAM,IAAI,aAAa,EAAE,MAAM,cAAc,CAAC;AACvD,OAAO,EAAE,KAAK,EAAE,MAAM,oBAAoB,CAAC;AAC3C,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C,SAAS,sBAAsB,CAAC,GAAuB,EAAE,QAAkB;IACvE,IAAI,MAAc,CAAC;IAEnB,IAAI,GAAG,EAAE,CAAC;QACN,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QACtC,MAAM,OAAO,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACnE,IAAI,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;YACnC,MAAM,GAAG,WAAW,CAAC;QACzB,CAAC;aAAM,CAAC;YACJ,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;QACvC,CAAC;IACL,CAAC;SAAM,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC7B,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;IACvC,CAAC;SAAM,CAAC;QACJ,MAAM,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,kCAAkC;IAC9D,CAAC;IAED,IAAI,YAAY,CAAC;IACjB,IAAI,CAAC,GAAG,CAAC,CAAC;IAEV,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACtB,MAAM,gBAAgB,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QAC/E,MAAM,KAAK,GAAG,gBAAgB,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;QACpD,IAAI,KAAK,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;YACvB,YAAY,GAAG,gBAAgB,CAAC,SAAS,CAAC,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;YAC1D,CAAC,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC;QACnC,CAAC;aAAM,CAAC;YACJ,YAAY,GAAG,gBAAgB,CAAC;QACpC,CAAC;IACL,CAAC;SAAM,CAAC;QACJ,YAAY,GAAG,WAAW,CAAC;IAC/B,CAAC;IAED,IAAI,WAAW,CAAC;IAChB,IAAI,YAAY,CAAC;IACjB,GAAG,CAAC;QACA,WAAW,GAAG,GAAG,YAAY,QAAQ,CAAC,MAAM,CAAC;QAC7C,YAAY,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;QACjD,CAAC,EAAE,CAAC;IACR,CAAC,QAAQ,MAAM,CAAC,YAAY,CAAC,EAAE;IAE/B,OAAO,YAAY,CAAC;AACxB,CAAC;AAED,SAAS,aAAa;IAElB,sEAAsE;IACtE,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC;IAClE,oFAAoF;IACpF,MAAM,cAAc,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAC5E,CAAC,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC;QACxB,CAAC,CAAC,SAAS,CAAC;IAEZ,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAEjE,+DAA+D;IAC/D,IAAI,WAAmB,CAAC;IACxB,IAAI,cAAsB,CAAC;IAE3B,QAAQ,OAAO,CAAC,QAAQ,EAAE,CAAC;QACvB,KAAK,OAAO;YACR,WAAW,GAAG,QAAQ,CAAC;YACvB,cAAc,GAAG,eAAe,CAAC;YACjC,MAAM;QACV,KAAK,QAAQ;YACT,WAAW,GAAG,QAAQ,CAAC;YACvB,cAAc,GAAG,WAAW,CAAC;YAC7B,MAAM;QACV,KAAK,OAAO;YACR,WAAW,GAAG,UAAU,CAAC;YACzB,cAAc,GAAG,WAAW,CAAC;YAC7B,MAAM;QACV;YACI,MAAM,IAAI,KAAK,CAAC,yBAAyB,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACrE,CAAC;IAED,OAAO,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,cAAc,CAAC,CAAC;AACvE,CAAC;AAED,MAAM,CAAC,MAAM,kBAAkB,GAAG,GAAG,EAAE;IACnC,MAAM,UAAU,GAAG,aAAa,EAAE,CAAC,IAAI,CAAC;QACpC,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,IAAI;QACX,QAAQ,EAAE,IAAI;QACd,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;QACb,GAAG,EAAE,IAAI;KACZ,CAAC,CAAC;IAEH,OAAO,UAAU,CAAC,MAAM,CAAC;QACrB,GAAG,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,0BAA0B,CAAC;QAChE,KAAK,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC
,gCAAgC,CAAC,CAAC,QAAQ,CAAC,+CAA+C,CAAC;QACrH,GAAG,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,kDAAkD,CAAC;QAC5E,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,2CAA2C,CAAC;KACtF,CAAC,CAAC;AACP,CAAC,CAAA;AAED,KAAK,UAAU,qBAAqB,CAAC,IAAS;IAC1C,MAAM,MAAM,GAAG,IAAI,MAAM,CAAU;QAC/B,QAAQ,EAAE,CAAC,EAAE,8BAA8B;QAC3C,iBAAiB,EAAE,wEAAwE;KAC9F,CAAC,CAAC;IAEH,OAAO,IAAI,OAAO,CAAC,CAAC,QAAQ,EAAE,MAAM,EAAE,EAAE;QACpC,MAAM,UAAU,GAAG,aAAa,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,kBAAkB,EAAE,UAAU,CAAC,CAAC;QAC5C,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC;YACtB,OAAO,MAAM,CAAC,IAAI,KAAK,CAAC,iCAAiC,UAAU,8EAA8E,CAAC,CAAC,CAAC;QACxJ,CAAC;QAED,wBAAwB;QACxB,MAAM,IAAI,GAAa,EAAE,CAAC;QAE1B,oBAAoB;QACpB,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACf,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YAC7E,MAAM,gBAAgB,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5D,IAAI,CAAC,IAAI,CAAC,GAAG,gBAAgB,CAAC,CAAC;QACnC,CAAC;QAED,cAAc;QACd,MAAM,MAAM,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;QAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,IAAI,MAAM,EAAE,MAAM,EAAE,GAAG,CAAC;QACnD,IAAI,MAAM,EAAE,CAAC;YACT,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;QACnC,CAAC;QAED,UAAU;QACV,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;YACX,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC;QACjC,CAAC;QAED,aAAa;QACb,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YACd,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QACvC,CAAC;QAED,MAAM,YAAY,GAAG,KAAK,CAAC,UAAU,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC;QAElF,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,IAAI,WAAW,GAAG,EAAE,CAAC;QAErB,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YAC1C,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;YAE9B,yCAAyC;YACzC,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YAC5D,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACvB,IAAI,CAAC;oBACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;oBACjC,IAAI,OAAO,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;wBACpC,MAAM,CAAC,IAAI,CAAC,qCAAqC,CAAC,CAAC;wBAEnD,2CAA2C;wBAC3C,MAAM,MAAM,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;wBAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,IAAI,MAAM,EAAE,MAAM,EAAE,GAAG,CAAC;wBACnD,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;wBACnG,MAAM,gBAAgB,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;wBAE5D,MAAM,cAAc,GAAG;4BACnB,GAAG,EAAE,4BAA4B;4BACjC,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,IAAI;4BAC3B,GAAG,EAAE,IAAI,CAAC,GAAG,IAAI,IAAI;4BACrB,MAAM,EAAE,MAAM,IAAI,IAAI;4BACtB,KAAK,EAAE,gBAAgB;yBAC1B,CAAC;wBAEF,MAAM,UAAU,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;wBAClD,MAAM,CAAC,IAAI,CAAC,6BAA6B,EAAE,UAAU,CAAC,CAAC;wBACvD,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,UAAU,GAAG,IAAI,CAAC,CAAC;wBAC7C,MAAM,CAAC,IAAI,CAAC,gCAAgC,EAAE,cAAc,CAAC,CAAC;wBAE9D,kBAAkB;wBAClB,KAAK,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;4BACvC,IAAI,CAAC;gCACD,IAAI,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;oCACpB,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;oCAC5C,MAAM,MAAM,GAAG,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;oCAC9C,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,WAAW,EAAE,KAAK,MAAM,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,YAAY,CAAC;oCAC/F,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;oCAE1C,MAAM,aAAa,GAAG;wCAClB,GAAG,EAAE,2BAA2B;wCAChC,MAAM;wCACN,QAAQ;wCACR,QAAQ,EAAE,SAAS;qCACtB,CAAC;oCAEF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;oCAChE,MAAM,CAAC,IAAI
,CAAC,uBAAuB,QAAQ,KAAK,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,GAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gCACzF,CAAC;4BACL,CAAC;4BAAC,OAAO,KAAK,EAAE,CAAC;gCACb,MAAM,CAAC,KAAK,CAAC,yBAAyB,SAAS,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;4BACtE,CAAC;wBACL,CAAC;oBACL,CAAC;yBAAM,IAAI,OAAO,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;wBAC3C,MAAM,CAAC,IAAI,CAAC,qCAAqC,CAAC,CAAC;wBACnD,MAAM,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC;wBAClC,IAAI,YAAY,IAAI,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC;4BACzC,IAAI,CAAC;gCACD,IAAI,MAAM,CAAC,YAAY,CAAC,EAAE,CAAC;oCACvB,UAAU,CAAC,YAAY,CAAC,CAAC;oCACzB,MAAM,CAAC,IAAI,CAAC,gCAAgC,YAAY,EAAE,CAAC,CAAC;oCAC5D,MAAM,eAAe,GAAG;wCACpB,GAAG,EAAE,2BAA2B;wCAChC,IAAI,EAAE,YAAY;qCACrB,CAAC;oCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC,CAAC;gCACtE,CAAC;qCAAM,CAAC;oCACJ,MAAM,CAAC,IAAI,CAAC,mCAAmC,YAAY,EAAE,CAAC,CAAC;oCAC/D,MAAM,aAAa,GAAG;wCAClB,GAAG,EAAE,qBAAqB;wCAC1B,IAAI,EAAE,YAAY;wCAClB,KAAK,EAAE,2BAA2B;qCACrC,CAAC;oCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;gCACpE,CAAC;4BACL,CAAC;4BAAC,OAAO,KAAK,EAAE,CAAC;gCACb,MAAM,CAAC,KAAK,CAAC,4BAA4B,YAAY,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;gCACxE,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,qBAAqB;oCAC1B,IAAI,EAAE,YAAY;oCAClB,KAAK,EAAE,KAAK,CAAC,OAAO;iCACvB,CAAC;gCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;4BACpE,CAAC;wBACL,CAAC;6BAAM,CAAC;4BACJ,MAAM,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;wBACtE,CAAC;oBACL,CAAC;yBAAM,IAAI,OAAO,CAAC,IAAI,KAAK,kBAAkB,EAAE,CAAC;wBAC7C,MAAM,CAAC,IAAI,CAAC,yCAAyC,CAAC,CAAC;wBAEvD,yDAAyD;wBACzD,MAAM,SAAS,GAAG,OAAO,CAAC,MAAM,CAAC;wBACjC,MAAM,QAAQ,GAAG,OAAO,CAAC,KAAK,IAAI,EAAE,CAAC;wBACrC,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC;wBAE3B,2EAA2E;wBAC3E,IAAI,CAAC;4BAED,MAAM,YAAY,GAAG,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;4BAC9D,MAAM,CAAC,IAAI,CAAC,uDAAuD,YAAY,EAAE,CAAC,CAAC;4BAEnF,MAAM,CAAC,IAAI,CAAC,kCAAkC,SAAS,GAAG,CAAC,CAAC;4BAE5D,IAAI,WAAW,GAAkB,IAAI,CAAC;4BAEtC,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gCACtB,gBAAgB;gCAChB,MAAM,CAAC,IAAI,CAAC,qBAAqB,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,SAAS,GAAG,CAAC,CAAC;gCACrF,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC;oCAC7C,GAAG,IAAI;oCACP,MAAM,EAAE,SAAS;oCACjB,OAAO,EAAE,QAAQ;oCACjB,GAAG,EAAE,YAAY,CAAC,mBAAmB;iCACxC,CAAC,CAAC;gCACH,WAAW,GAAG,MAAM,SAAS,CAAC,SAAS,EAAE,QAAQ,EAAE,aAAa,CAAC,CAAC;4BACtE,CAAC;iCAAM,CAAC;gCACJ,iBAAiB;gCACjB,MAAM,CAAC,IAAI,CAAC,gCAAgC,SAAS,GAAG,CAAC,CAAC;gCAC1D,MAAM,YAAY,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC;gCACjC,OAAO,YAAY,CAAC,OAAO,CAAC;gCAC5B,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC;oCAC7C,GAAG,YAAY;oCACf,MAAM,EAAE,SAAS;oCACjB,GAAG,EAAE,YAAY,CAAC,mBAAmB;iCACxC,CAAC,CAAC;gCACH,WAAW,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,aAAa,CAAC,CAAC;4BAC9D,CAAC;4BAED,IAAI,WAAW,EAAE,CAAC;gCACd,KAAK,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;gCACjC,MAAM,CAAC,IAAI,CAAC,qBAAqB,YAAY,EAAE,CAAC,CAAC;gCAEjD,uDAAuD;gCACvD,MAAM,YAAY,GAAG,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;gCAEpD,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,2BAA2B;oCAChC,MAAM,EAAE,YAAY;oCACpB,QAAQ,EAAE,WAAW;oCACrB,QAAQ,EAAE,YAAY;iCACzB,CAAC;gCAEF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;gCAChE,MAAM,CAAC,IAAI,CAAC,kCAAkC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC;4BACjF,CAAC;iCAAM,CAAC;gCACJ,MAAM,CAAC,KAAK,CAAC,4BAA4B,CAAC,CAAC;gCAE3C,yBAAyB;gCACzB,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,kBAAkB;oCACvB,KAAK,EAAE,0BAA0B;iCACpC,CAAC;gCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;4BACpE,CAAC;wBACL,CAAC;wBAAC,OAAO,KAAK,EAAE,CAAC;4BACb,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;4BAC5E,MAAM,UAAU,GAA
G,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;4BAEpE,OAAO,CAAC,GAAG,CAAC,sBAAsB,EAAE,KAAK,EAAC,YAAY,CAAC,CAAC;4BAExD,MAAM,CAAC,KAAK,CAAC,qBAAqB,EAAE;gCAChC,OAAO,EAAE,YAAY;gCACrB,KAAK,EAAE,UAAU;gCACjB,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gCAC5C,SAAS,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;gCAChC,KAAK,EAAE,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;6BAC9C,CAAC,CAAC;4BAEH,kCAAkC;4BAClC,MAAM,aAAa,GAAG;gCAClB,GAAG,EAAE,kBAAkB;gCACvB,KAAK,EAAE,YAAY;gCACnB,OAAO,EAAE;oCACL,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;oCAC5C,SAAS,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;oCAChC,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;iCACtC;6BACJ,CAAC;4BACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;wBACpE,CAAC;oBACL,CAAC;gBACL,CAAC;gBAAC,OAAO,CAAC,EAAE,CAAC;oBACT,4CAA4C;oBAC5C,MAAM,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;oBACvD,MAAM,IAAI,IAAI,GAAG,IAAI,CAAC;gBAC1B,CAAC;YACL,CAAC;QACL,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;YACpC,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YAE5D,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACvB,IAAI,CAAC;oBACD,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;oBACpC,IAAI,UAAU,CAAC,KAAK,IAAI,UAAU,CAAC,OAAO,EAAE,CAAC;wBACzC,qCAAqC;wBAErC,gCAAgC;wBAChC,IAAI,UAAU,CAAC,OAAO,KAAK,mCAAmC;4BAC1D,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,sCAAsC,CAAC,EAAE,CAAC;4BACtE,OAAO,CAAC,wBAAwB;wBACpC,CAAC;wBAED,wCAAwC;wBACxC,IAAI,UAAU,CAAC,OAAO,KAAK,wBAAwB,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC;4BAC9E,IAAI,CAAC;gCACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gCACpD,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;oCACd,MAAM,CAAC,IAAI,CAAC,gBAAgB,OAAO,CAAC,GAAG,EAAE,EAAE;wCACvC,MAAM,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,SAAS;wCACrH,GAAG,EAAE,OAAO,CAAC,GAAG;wCAChB,KAAK,EAAE,OAAO,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC,MAAM,QAAQ,CAAC,CAAC,CAAC,SAAS;wCAC1E,SAAS,EAAE,CAAC,CAAC,OAAO,CAAC,MAAM;qCAC9B,CAAC,CAAC;oCACH,OAAO;gCACX,CAAC;4BACL,CAAC;4BAAC,OAAO,CAAC,EAAE,CAAC;gCACT,kCAAkC;4BACtC,CAAC;wBACL,CAAC;wBAED,QAAQ,UAAU,CAAC,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;4BACrC,KAAK,OAAO;gCACR,MAAM,CAAC,KAAK,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCAC1D,MAAM;4BACV,KAAK,MAAM;gCACP,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCACzD,MAAM;4BACV,KAAK,MAAM;gCACP,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCACzD,MAAM;4BACV,KAAK,OAAO;gCACR,MAAM,CAAC,KAAK,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCAC1D,MAAM;4BACV;gCACI,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;wBACjE,CAAC;oBACL,CAAC;yBAAM,CAAC;wBACJ,yDAAyD;wBACzD,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBAC5B,CAAC;gBACL,CAAC;gBAAC,OAAO,CAAC,EAAE,CAAC;oBACT,kEAAkE;oBAClE,IAAI,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC;wBAC9B,mCAAmC;wBACnC,IAAI,IAAI,CAAC,QAAQ,CAAC,kCAAkC,CAAC;4BACjD,IAAI,CAAC,QAAQ,CAAC,sCAAsC,CAAC,EAAE,CAAC;4BACxD,OAAO,CAAC,aAAa;wBACzB,CAAC;wBACD,wDAAwD;wBACxD,IAAI,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,sBAAsB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;4BACzG,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,mBAAmB,EAAE,EAAE,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;4BAClF,MAAM,C
AAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;wBACnC,CAAC;oBACL,CAAC;yBAAM,IAAI,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC;wBACrB,gCAAgC;wBAChC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBAC5B,CAAC;gBACL,CAAC;YACL,CAAC;YACD,WAAW,IAAI,KAAK,CAAC;QACzB,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAI,EAAE,EAAE;YAC9B,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,IAAI,CAAC,CAAC;YACnD,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;YACrD,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC,CAAC;YAE1D,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;gBACb,MAAM,aAAa,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;gBACpC,MAAM,CAAC,IAAI,CAAC,2BAA2B,EAAE,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC;gBACxE,QAAQ,CAAC,aAAa,IAAI,IAAI,CAAC,CAAC;YACpC,CAAC;iBAAM,CAAC;gBACJ,MAAM,CAAC,IAAI,KAAK,CAAC,8BAA8B,IAAI,aAAa,WAAW,EAAE,CAAC,CAAC,CAAC;YACpF,CAAC;QACL,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YAC7B,MAAM,CAAC,GAAG,CAAC,CAAC;QAChB,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;AACP,CAAC;AAGD,MAAM,CAAC,MAAM,YAAY,GAAG,KAAK,EAAE,IAAS,EAAE,EAAE;IAC5C,MAAM,MAAM,GAAG,IAAI,MAAM,CAAU,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,IAAI,CAAC,EAAE,CAAC,CAAC;IAErE,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;QACX,IAAI,CAAC;YACD,MAAM,SAAS,GAAG,MAAM,qBAAqB,CAAC,IAAI,CAAC,CAAC;YACpD,IAAI,SAAS,EAAE,CAAC;gBACZ,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;gBACtC,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;gBAC7B,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;oBAC5C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC;gBACjC,CAAC;gBACD,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;oBACd,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;gBAC3B,CAAC;YACL,CAAC;iBAAM,CAAC;gBACJ,MAAM,CAAC,IAAI,CAAC,wCAAwC,CAAC,CAAC;gBACtD,OAAO;YACX,CAAC;QACL,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,MAAM,CAAC,KAAK,CAAC,oBAAoB,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;YAClD,OAAO;QACX,CAAC;IACL,CAAC;IAED,IAAI,IAAI,CAAC,OAAO,IAAI,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAED,IAAI,CAAC;QACD,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QACvD,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,GAAG,IAAI,EAAE,GAAG,aAAa,CAAC;QAEhD,MAAM,aAAa,GAAG,MAAM,aAAa,CAAC,aAAa,CAAC,CAAC;QACzD,MAAM,MAAM,GAAG,aAAa,EAAE,OAAiB,IAAI,EAAE,CAAC;QAEtD,IAAI,CAAC,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;YACtB,MAAM,CAAC,KAAK,CAAC,yFAAyF,CAAC,CAAC;YACxG,OAAO;QACX,CAAC;QAED,IAAI,CAAC,GAAG,EAAE,CAAC;YACP,MAAM,CAAC,KAAK,CAAC,oDAAoD,CAAC,CAAC;YACnE,OAAO;QACX,CAAC;QAED,IAAI,WAAW,GAAkB,IAAI,CAAC;QAEtC,IAAI,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACpD,gBAAgB;YAChB,KAAK,MAAM,SAAS,IAAI,OAAO,EAAE,CAAC;gBAC9B,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;oBACrB,MAAM,CAAC,KAAK,CAAC,6BAA6B,SAAS,EAAE,CAAC,CAAC;oBACvD,OAAO;gBACX,CAAC;YACL,CAAC;YACD,IAAI,CAAC,MAAM,EAAE,CAAC;gBACV,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;gBACxD,OAAO;YACX,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,qBAAqB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,MAAM,GAAG,CAAC,CAAC;YACjF,WAAW,GAAG,MAAM,SAAS,CAAC,MAAM,EAAE,OAAO,EAAE,aAAa,CAAC,CAAC;QAClE,CAAC;aAAM,IAAI,MAAM,EAAE,CAAC;YAChB,iBAAiB;YACjB,MAAM,CAAC,IAAI,CAAC,gCAAgC,MAAM,GAAG,CAAC,CAAC;YACvD,WAAW,GAAG,MAAM,WAAW,CAAC,MAAM,EAAE,aAAa,CAAC,CAAC;QAC3D,CAAC;QAED,IAAI,WAAW,EAAE,CAAC;YACd,MAAM,IAAI,GAAG,SAAS,CAAC,aAAa,CAAC,CAAC;YACtC,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC;YACpE,KAAK,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;YAC5B,MAAM,CAAC,IAAI,CAAC,mBAAmB,OAAO,EAAE,CAAC,CAAC;QAC9C,CAAC;aAAM,CAAC;YACJ,MAAM,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAC;QAC9C,CAAC;IAEL,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACb,MAAM,CAAC,KAAK,CAAC,4CAA4C,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,KAA
K,CAAC,KAAK,CAAC,CAAC;IACzG,CAAC;AACL,CAAC,CAAC"}
+//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"images.js","sourceRoot":"","sources":["../../src/commands/images.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,KAAK,IAAI,MAAM,WAAW,CAAC;AAClC,OAAO,EAAE,IAAI,IAAI,KAAK,EAAE,MAAM,oBAAoB,CAAC;AACnD,OAAO,EAAE,IAAI,IAAI,MAAM,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EACH,YAAY,EACZ,QAAQ,EACR,UAAU,EACb,MAAM,SAAS,CAAC;AACjB,OAAO,EAAW,MAAM,EAAE,MAAM,OAAO,CAAC;AACxC,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAC5C,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAE5C,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAC;AAE9D,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AACjE,OAAO,EAAE,MAAM,IAAI,aAAa,EAAE,MAAM,cAAc,CAAC;AACvD,OAAO,EAAE,KAAK,EAAE,MAAM,oBAAoB,CAAC;AAC3C,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C,SAAS,sBAAsB,CAAC,GAAuB,EAAE,QAAkB;IACvE,IAAI,MAAc,CAAC;IAEnB,IAAI,GAAG,EAAE,CAAC;QACN,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QACtC,MAAM,OAAO,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACnE,IAAI,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;YACnC,MAAM,GAAG,WAAW,CAAC;QACzB,CAAC;aAAM,CAAC;YACJ,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;QACvC,CAAC;IACL,CAAC;SAAM,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC7B,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;IACvC,CAAC;SAAM,CAAC;QACJ,MAAM,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,kCAAkC;IAC9D,CAAC;IAED,IAAI,YAAY,CAAC;IACjB,IAAI,CAAC,GAAG,CAAC,CAAC;IAEV,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACtB,MAAM,gBAAgB,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QAC/E,MAAM,KAAK,GAAG,gBAAgB,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;QACpD,IAAI,KAAK,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;YACvB,YAAY,GAAG,gBAAgB,CAAC,SAAS,CAAC,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;YAC1D,CAAC,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC;QACnC,CAAC;aAAM,CAAC;YACJ,YAAY,GAAG,gBAAgB,CAAC;QACpC,CAAC;IACL,CAAC;SAAM,CAAC;QACJ,YAAY,GAAG,WAAW,CAAC;IAC/B,CAAC;IAED,IAAI,WAAW,CAAC;IAChB,IAAI,YAAY,CAAC;IACjB,GAAG,CAAC;QACA,WAAW,GAAG,GAAG,YAAY,QAAQ,CAAC,MAAM,CAAC;QAC7C,YAAY,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;QACjD,CAAC,EAAE,CAAC;IACR,CAAC,QAAQ,MAAM,CAAC,YAAY,CAAC,EAAE;IAE/B,OAAO,YAAY,CAAC;AACxB,CAAC;AAED,SAAS,aAAa;IAElB,sEAAsE;IACtE,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC;IAClE,oFAAoF;IACpF,MAAM,cAAc,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAC5E,CAAC,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC;QACxB,CAAC,CAAC,SAAS,CAAC;IAEZ,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAEjE,+DAA+D;IAC/D,IAAI,WAAmB,CAAC;IACxB,IAAI,cAAsB,CAAC;IAE3B,QAAQ,OAAO,CAAC,QAAQ,EAAE,CAAC;QACvB,KAAK,OAAO;YACR,WAAW,GAAG,QAAQ,CAAC;YACvB,cAAc,GAAG,eAAe,CAAC;YACjC,MAAM;QACV,KAAK,QAAQ;YACT,WAAW,GAAG,QAAQ,CAAC;YACvB,cAAc,GAAG,WAAW,CAAC;YAC7B,MAAM;QACV,KAAK,OAAO;YACR,WAAW,GAAG,UAAU,CAAC;YACzB,cAAc,GAAG,WAAW,CAAC;YAC7B,MAAM;QACV;YACI,MAAM,IAAI,KAAK,CAAC,yBAAyB,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACrE,CAAC;IAED,OAAO,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,cAAc,CAAC,CAAC;AACvE,CAAC;AAED,MAAM,CAAC,MAAM,kBAAkB,GAAG,GAAG,EAAE;IACnC,MAAM,UAAU,GAAG,aAAa,EAAE,CAAC,IAAI,CAAC;QACpC,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,IAAI;QACX,QAAQ,EAAE,IAAI;QACd,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;QACb,GAAG,EAAE,IAAI;KACZ,CAAC,CAAC;IAEH,OAAO,UAAU,CAAC,MAAM,CAAC;QACrB,GAAG,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,0BAA0B,CAAC;QAChE,KAAK,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC
,gCAAgC,CAAC,CAAC,QAAQ,CAAC,+CAA+C,CAAC;QACrH,GAAG,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,kDAAkD,CAAC;QAC5E,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,2CAA2C,CAAC;KACtF,CAAC,CAAC;AACP,CAAC,CAAA;AAED,KAAK,UAAU,qBAAqB,CAAC,IAAS;IAC1C,MAAM,MAAM,GAAG,IAAI,MAAM,CAAU;QAC/B,QAAQ,EAAE,CAAC,EAAE,8BAA8B;QAC3C,iBAAiB,EAAE,wEAAwE;KAC9F,CAAC,CAAC;IAEH,OAAO,IAAI,OAAO,CAAC,CAAC,QAAQ,EAAE,MAAM,EAAE,EAAE;QACpC,MAAM,UAAU,GAAG,aAAa,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,kBAAkB,EAAE,UAAU,CAAC,CAAC;QAC5C,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC;YACtB,OAAO,MAAM,CAAC,IAAI,KAAK,CAAC,iCAAiC,UAAU,8EAA8E,CAAC,CAAC,CAAC;QACxJ,CAAC;QAED,wBAAwB;QACxB,MAAM,IAAI,GAAa,EAAE,CAAC;QAE1B,oBAAoB;QACpB,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACf,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YAC7E,MAAM,gBAAgB,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5D,IAAI,CAAC,IAAI,CAAC,GAAG,gBAAgB,CAAC,CAAC;QACnC,CAAC;QAED,cAAc;QACd,MAAM,MAAM,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;QAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,IAAI,MAAM,EAAE,MAAM,EAAE,GAAG,CAAC;QACnD,IAAI,MAAM,EAAE,CAAC;YACT,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;QACnC,CAAC;QAED,UAAU;QACV,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;YACX,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC;QACjC,CAAC;QAED,aAAa;QACb,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YACd,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QACvC,CAAC;QAED,MAAM,YAAY,GAAG,KAAK,CAAC,UAAU,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC;QAElF,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,IAAI,WAAW,GAAG,EAAE,CAAC;QAErB,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YAC1C,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;YAE9B,yCAAyC;YACzC,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YAC5D,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACvB,IAAI,CAAC;oBACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;oBACjC,IAAI,OAAO,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;wBACpC,MAAM,CAAC,IAAI,CAAC,qCAAqC,CAAC,CAAC;wBAEnD,2CAA2C;wBAC3C,MAAM,MAAM,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;wBAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,IAAI,MAAM,EAAE,MAAM,EAAE,GAAG,CAAC;wBACnD,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;wBACnG,MAAM,gBAAgB,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;wBAE5D,MAAM,cAAc,GAAG;4BACnB,GAAG,EAAE,4BAA4B;4BACjC,MAAM,EAAE,IAAI,CAAC,MAAM,IAAI,IAAI;4BAC3B,GAAG,EAAE,IAAI,CAAC,GAAG,IAAI,IAAI;4BACrB,MAAM,EAAE,MAAM,IAAI,IAAI;4BACtB,KAAK,EAAE,gBAAgB;yBAC1B,CAAC;wBAEF,MAAM,UAAU,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;wBAClD,MAAM,CAAC,IAAI,CAAC,6BAA6B,EAAE,UAAU,CAAC,CAAC;wBACvD,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,UAAU,GAAG,IAAI,CAAC,CAAC;wBAC7C,MAAM,CAAC,IAAI,CAAC,gCAAgC,EAAE,cAAc,CAAC,CAAC;wBAE9D,kBAAkB;wBAClB,KAAK,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;4BACvC,IAAI,CAAC;gCACD,IAAI,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;oCACpB,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;oCAC5C,MAAM,MAAM,GAAG,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;oCAC9C,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,WAAW,EAAE,KAAK,MAAM,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,YAAY,CAAC;oCAC/F,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;oCAE1C,MAAM,aAAa,GAAG;wCAClB,GAAG,EAAE,2BAA2B;wCAChC,MAAM;wCACN,QAAQ;wCACR,QAAQ,EAAE,SAAS;qCACtB,CAAC;oCAEF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;oCAChE,MAAM,CAAC,IAAI
,CAAC,uBAAuB,QAAQ,KAAK,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,GAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gCACzF,CAAC;4BACL,CAAC;4BAAC,OAAO,KAAK,EAAE,CAAC;gCACb,MAAM,CAAC,KAAK,CAAC,yBAAyB,SAAS,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;4BACtE,CAAC;wBACL,CAAC;oBACL,CAAC;yBAAM,IAAI,OAAO,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;wBAC3C,MAAM,CAAC,IAAI,CAAC,qCAAqC,CAAC,CAAC;wBACnD,MAAM,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC;wBAClC,IAAI,YAAY,IAAI,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC;4BACzC,IAAI,CAAC;gCACD,IAAI,MAAM,CAAC,YAAY,CAAC,EAAE,CAAC;oCACvB,UAAU,CAAC,YAAY,CAAC,CAAC;oCACzB,MAAM,CAAC,IAAI,CAAC,gCAAgC,YAAY,EAAE,CAAC,CAAC;oCAC5D,MAAM,eAAe,GAAG;wCACpB,GAAG,EAAE,2BAA2B;wCAChC,IAAI,EAAE,YAAY;qCACrB,CAAC;oCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC,CAAC;gCACtE,CAAC;qCAAM,CAAC;oCACJ,MAAM,CAAC,IAAI,CAAC,mCAAmC,YAAY,EAAE,CAAC,CAAC;oCAC/D,MAAM,aAAa,GAAG;wCAClB,GAAG,EAAE,qBAAqB;wCAC1B,IAAI,EAAE,YAAY;wCAClB,KAAK,EAAE,2BAA2B;qCACrC,CAAC;oCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;gCACpE,CAAC;4BACL,CAAC;4BAAC,OAAO,KAAK,EAAE,CAAC;gCACb,MAAM,CAAC,KAAK,CAAC,4BAA4B,YAAY,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;gCACxE,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,qBAAqB;oCAC1B,IAAI,EAAE,YAAY;oCAClB,KAAK,EAAE,KAAK,CAAC,OAAO;iCACvB,CAAC;gCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;4BACpE,CAAC;wBACL,CAAC;6BAAM,CAAC;4BACJ,MAAM,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;wBACtE,CAAC;oBACL,CAAC;yBAAM,IAAI,OAAO,CAAC,IAAI,KAAK,kBAAkB,EAAE,CAAC;wBAC7C,MAAM,CAAC,IAAI,CAAC,yCAAyC,CAAC,CAAC;wBAEvD,yDAAyD;wBACzD,MAAM,SAAS,GAAG,OAAO,CAAC,MAAM,CAAC;wBACjC,MAAM,QAAQ,GAAG,OAAO,CAAC,KAAK,IAAI,EAAE,CAAC;wBACrC,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC;wBAE3B,2EAA2E;wBAC3E,IAAI,CAAC;4BAED,MAAM,YAAY,GAAG,sBAAsB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;4BAC9D,MAAM,CAAC,IAAI,CAAC,uDAAuD,YAAY,EAAE,CAAC,CAAC;4BAEnF,MAAM,CAAC,IAAI,CAAC,kCAAkC,SAAS,GAAG,CAAC,CAAC;4BAE5D,IAAI,WAAW,GAAkB,IAAI,CAAC;4BAEtC,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gCACtB,gBAAgB;gCAChB,MAAM,CAAC,IAAI,CAAC,qBAAqB,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,SAAS,GAAG,CAAC,CAAC;gCACrF,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC;oCAC7C,GAAG,IAAI;oCACP,MAAM,EAAE,SAAS;oCACjB,OAAO,EAAE,QAAQ;oCACjB,GAAG,EAAE,YAAY,CAAC,mBAAmB;iCACxC,CAAC,CAAC;gCACH,WAAW,GAAG,MAAM,SAAS,CAAC,SAAS,EAAE,QAAQ,EAAE,aAAa,CAAC,CAAC;4BACtE,CAAC;iCAAM,CAAC;gCACJ,iBAAiB;gCACjB,MAAM,CAAC,IAAI,CAAC,gCAAgC,SAAS,GAAG,CAAC,CAAC;gCAC1D,MAAM,YAAY,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC;gCACjC,OAAO,YAAY,CAAC,OAAO,CAAC;gCAC5B,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC;oCAC7C,GAAG,YAAY;oCACf,MAAM,EAAE,SAAS;oCACjB,GAAG,EAAE,YAAY,CAAC,mBAAmB;iCACxC,CAAC,CAAC;gCACH,WAAW,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,aAAa,CAAC,CAAC;4BAC9D,CAAC;4BAED,IAAI,WAAW,EAAE,CAAC;gCACd,KAAK,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;gCACjC,MAAM,CAAC,IAAI,CAAC,qBAAqB,YAAY,EAAE,CAAC,CAAC;gCAEjD,uDAAuD;gCACvD,MAAM,YAAY,GAAG,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;gCAEpD,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,2BAA2B;oCAChC,MAAM,EAAE,YAAY;oCACpB,QAAQ,EAAE,WAAW;oCACrB,QAAQ,EAAE,YAAY;iCACzB,CAAC;gCAEF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;gCAChE,MAAM,CAAC,IAAI,CAAC,kCAAkC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC;4BACjF,CAAC;iCAAM,CAAC;gCACJ,MAAM,CAAC,KAAK,CAAC,4BAA4B,CAAC,CAAC;gCAE3C,yBAAyB;gCACzB,MAAM,aAAa,GAAG;oCAClB,GAAG,EAAE,kBAAkB;oCACvB,KAAK,EAAE,0BAA0B;iCACpC,CAAC;gCACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;4BACpE,CAAC;wBACL,CAAC;wBAAC,OAAO,KAAK,EAAE,CAAC;4BACb,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;4BAC5E,MAAM,UAAU,GAA
G,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;4BAEpE,OAAO,CAAC,GAAG,CAAC,sBAAsB,EAAE,KAAK,EAAC,YAAY,CAAC,CAAC;4BAExD,MAAM,CAAC,KAAK,CAAC,qBAAqB,EAAE;gCAChC,OAAO,EAAE,YAAY;gCACrB,KAAK,EAAE,UAAU;gCACjB,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gCAC5C,SAAS,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;gCAChC,KAAK,EAAE,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;6BAC9C,CAAC,CAAC;4BAEH,kCAAkC;4BAClC,MAAM,aAAa,GAAG;gCAClB,GAAG,EAAE,kBAAkB;gCACvB,KAAK,EAAE,YAAY;gCACnB,OAAO,EAAE;oCACL,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;oCAC5C,SAAS,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;oCAChC,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;iCACtC;6BACJ,CAAC;4BACF,YAAY,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC;wBACpE,CAAC;oBACL,CAAC;gBACL,CAAC;gBAAC,OAAO,CAAC,EAAE,CAAC;oBACT,4CAA4C;oBAC5C,MAAM,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;oBACvD,MAAM,IAAI,IAAI,GAAG,IAAI,CAAC;gBAC1B,CAAC;YACL,CAAC;QACL,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;YACpC,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YAE5D,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACvB,IAAI,CAAC;oBACD,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;oBACpC,IAAI,UAAU,CAAC,KAAK,IAAI,UAAU,CAAC,OAAO,EAAE,CAAC;wBACzC,qCAAqC;wBAErC,gCAAgC;wBAChC,IAAI,UAAU,CAAC,OAAO,KAAK,mCAAmC;4BAC1D,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,sCAAsC,CAAC,EAAE,CAAC;4BACtE,OAAO,CAAC,wBAAwB;wBACpC,CAAC;wBAED,wCAAwC;wBACxC,IAAI,UAAU,CAAC,OAAO,KAAK,wBAAwB,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC;4BAC9E,IAAI,CAAC;gCACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gCACpD,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;oCACd,MAAM,CAAC,IAAI,CAAC,gBAAgB,OAAO,CAAC,GAAG,EAAE,EAAE;wCACvC,MAAM,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,SAAS;wCACrH,GAAG,EAAE,OAAO,CAAC,GAAG;wCAChB,KAAK,EAAE,OAAO,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC,MAAM,QAAQ,CAAC,CAAC,CAAC,SAAS;wCAC1E,SAAS,EAAE,CAAC,CAAC,OAAO,CAAC,MAAM;qCAC9B,CAAC,CAAC;oCACH,OAAO;gCACX,CAAC;4BACL,CAAC;4BAAC,OAAO,CAAC,EAAE,CAAC;gCACT,kCAAkC;4BACtC,CAAC;wBACL,CAAC;wBAED,QAAQ,UAAU,CAAC,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;4BACrC,KAAK,OAAO;gCACR,MAAM,CAAC,KAAK,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCAC1D,MAAM;4BACV,KAAK,MAAM;gCACP,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCACzD,MAAM;4BACV,KAAK,MAAM;gCACP,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCACzD,MAAM;4BACV,KAAK,OAAO;gCACR,MAAM,CAAC,KAAK,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;gCAC1D,MAAM;4BACV;gCACI,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,OAAO,EAAE,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC;wBACjE,CAAC;oBACL,CAAC;yBAAM,CAAC;wBACJ,yDAAyD;wBACzD,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBAC5B,CAAC;gBACL,CAAC;gBAAC,OAAO,CAAC,EAAE,CAAC;oBACT,kEAAkE;oBAClE,IAAI,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC;wBAC9B,mCAAmC;wBACnC,IAAI,IAAI,CAAC,QAAQ,CAAC,kCAAkC,CAAC;4BACjD,IAAI,CAAC,QAAQ,CAAC,sCAAsC,CAAC,EAAE,CAAC;4BACxD,OAAO,CAAC,aAAa;wBACzB,CAAC;wBACD,wDAAwD;wBACxD,IAAI,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,sBAAsB,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;4BACzG,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,mBAAmB,EAAE,EAAE,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;4BAClF,MAAM,C
AAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;wBACnC,CAAC;oBACL,CAAC;yBAAM,IAAI,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC;wBACrB,gCAAgC;wBAChC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBAC5B,CAAC;gBACL,CAAC;YACL,CAAC;YACD,WAAW,IAAI,KAAK,CAAC;QACzB,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAI,EAAE,EAAE;YAC9B,MAAM,CAAC,IAAI,CAAC,+BAA+B,EAAE,IAAI,CAAC,CAAC;YACnD,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;YACrD,MAAM,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC,CAAC;YAE1D,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;gBACb,MAAM,aAAa,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;gBACpC,MAAM,CAAC,IAAI,CAAC,2BAA2B,EAAE,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC;gBACxE,QAAQ,CAAC,aAAa,IAAI,IAAI,CAAC,CAAC;YACpC,CAAC;iBAAM,CAAC;gBACJ,MAAM,CAAC,IAAI,KAAK,CAAC,8BAA8B,IAAI,aAAa,WAAW,EAAE,CAAC,CAAC,CAAC;YACpF,CAAC;QACL,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YAC7B,MAAM,CAAC,GAAG,CAAC,CAAC;QAChB,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;AACP,CAAC;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,KAAK,EAAE,IAAS,EAAE,EAAE;IAC5C,MAAM,MAAM,GAAG,IAAI,MAAM,CAAU,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,IAAI,CAAC,EAAE,CAAC,CAAC;IAErE,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;QACX,IAAI,CAAC;YACD,MAAM,SAAS,GAAG,MAAM,qBAAqB,CAAC,IAAI,CAAC,CAAC;YACpD,IAAI,SAAS,EAAE,CAAC;gBACZ,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;gBACtC,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;gBAC7B,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;oBAC5C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC;gBACjC,CAAC;gBACD,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;oBACd,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;gBAC3B,CAAC;YACL,CAAC;iBAAM,CAAC;gBACJ,MAAM,CAAC,IAAI,CAAC,wCAAwC,CAAC,CAAC;gBACtD,OAAO;YACX,CAAC;QACL,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,MAAM,CAAC,KAAK,CAAC,oBAAoB,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;YAClD,OAAO;QACX,CAAC;IACL,CAAC;IAED,IAAI,IAAI,CAAC,OAAO,IAAI,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAED,IAAI,CAAC;QACD,MAAM,aAAa,GAAG,kBAAkB,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QACvD,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,GAAG,IAAI,EAAE,GAAG,aAAa,CAAC;QAEhD,MAAM,aAAa,GAAG,MAAM,aAAa,CAAC,aAAa,CAAC,CAAC;QACzD,MAAM,MAAM,GAAG,aAAa,EAAE,OAAiB,IAAI,EAAE,CAAC;QAEtD,IAAI,CAAC,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;YACtB,MAAM,CAAC,KAAK,CAAC,yFAAyF,CAAC,CAAC;YACxG,OAAO;QACX,CAAC;QAED,IAAI,CAAC,GAAG,EAAE,CAAC;YACP,MAAM,CAAC,KAAK,CAAC,oDAAoD,CAAC,CAAC;YACnE,OAAO;QACX,CAAC;QAED,IAAI,WAAW,GAAkB,IAAI,CAAC;QAEtC,IAAI,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACpD,gBAAgB;YAChB,KAAK,MAAM,SAAS,IAAI,OAAO,EAAE,CAAC;gBAC9B,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC;oBACrB,MAAM,CAAC,KAAK,CAAC,6BAA6B,SAAS,EAAE,CAAC,CAAC;oBACvD,OAAO;gBACX,CAAC;YACL,CAAC;YACD,IAAI,CAAC,MAAM,EAAE,CAAC;gBACV,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;gBACxD,OAAO;YACX,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,qBAAqB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,MAAM,GAAG,CAAC,CAAC;YACjF,WAAW,GAAG,MAAM,SAAS,CAAC,MAAM,EAAE,OAAO,EAAE,aAAa,CAAC,CAAC;QAClE,CAAC;aAAM,IAAI,MAAM,EAAE,CAAC;YAChB,iBAAiB;YACjB,MAAM,CAAC,IAAI,CAAC,gCAAgC,MAAM,GAAG,CAAC,CAAC;YACvD,WAAW,GAAG,MAAM,WAAW,CAAC,MAAM,EAAE,aAAa,CAAC,CAAC;QAC3D,CAAC;QAED,IAAI,WAAW,EAAE,CAAC;YACd,MAAM,IAAI,GAAG,SAAS,CAAC,aAAa,CAAC,CAAC;YACtC,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC;YACpE,KAAK,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;YAC5B,MAAM,CAAC,IAAI,CAAC,mBAAmB,OAAO,EAAE,CAAC,CAAC;QAC9C,CAAC;aAAM,CAAC;YACJ,MAAM,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAC;QAC9C,CAAC;IAEL,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACb,MAAM,CAAC,KAAK,CAAC,4CAA4C,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,KAA
K,CAAC,KAAK,CAAC,CAAC;IACzG,CAAC;AACL,CAAC,CAAC"}
// EXTERNAL MODULE: ./node_modules/@elevenlabs/elevenlabs-js/index.js
var main_elevenlabs_js = __webpack_require__(91734);
;// ./dist-in/lib/tts-elevenlabs.js
diff --git a/packages/kbot/dist/package-lock.json b/packages/kbot/dist/package-lock.json
index 8fabef85..a86fc6d0 100644
--- a/packages/kbot/dist/package-lock.json
+++ b/packages/kbot/dist/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "@plastichub/kbot",
- "version": "1.1.54",
+ "version": "1.1.55",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@plastichub/kbot",
- "version": "1.1.54",
+ "version": "1.1.55",
"license": "ISC",
"dependencies": {
"node-emoji": "^2.2.0"
diff --git a/packages/kbot/dist/package.json b/packages/kbot/dist/package.json
index 549cf7fe..2016812c 100644
--- a/packages/kbot/dist/package.json
+++ b/packages/kbot/dist/package.json
@@ -1,6 +1,6 @@
{
"name": "@plastichub/kbot",
- "version": "1.1.54",
+ "version": "1.1.55",
"main": "main_node.js",
"author": "",
"license": "ISC",
diff --git a/packages/kbot/docs/images-tauri-5.md b/packages/kbot/docs/images-tauri-5.md
index bf5710cc..486e1660 100644
--- a/packages/kbot/docs/images-tauri-5.md
+++ b/packages/kbot/docs/images-tauri-5.md
@@ -133,3 +133,4 @@ export class WebImageClient {
This structure will be decomposed into a detailed TODO roadmap in the following slice.
+
diff --git a/packages/kbot/src/commands/images.ts b/packages/kbot/src/commands/images.ts
index 25dd7933..395b70c6 100644
--- a/packages/kbot/src/commands/images.ts
+++ b/packages/kbot/src/commands/images.ts
@@ -448,7 +448,6 @@ async function launchGuiAndGetPrompt(argv: any): Promise {
});
}
-
export const imageCommand = async (argv: any) => {
const logger = new Logger({ minLevel: argv.logLevel || 2 });
diff --git a/packages/kbot/src/models/cache/openai-models.ts b/packages/kbot/src/models/cache/openai-models.ts
index 1d545eb8..c5292115 100644
--- a/packages/kbot/src/models/cache/openai-models.ts
+++ b/packages/kbot/src/models/cache/openai-models.ts
@@ -2,11 +2,11 @@ export enum E_OPENAI_MODEL {
MODEL_GPT_4_0613 = "gpt-4-0613",
MODEL_GPT_4 = "gpt-4",
MODEL_GPT_3_5_TURBO = "gpt-3.5-turbo",
- MODEL_GPT_AUDIO = "gpt-audio",
- MODEL_GPT_5_NANO = "gpt-5-nano",
- MODEL_GPT_AUDIO_2025_08_28 = "gpt-audio-2025-08-28",
- MODEL_GPT_REALTIME = "gpt-realtime",
- MODEL_GPT_REALTIME_2025_08_28 = "gpt-realtime-2025-08-28",
+ MODEL_SORA_2_PRO = "sora-2-pro",
+ MODEL_GPT_AUDIO_MINI_2025_10_06 = "gpt-audio-mini-2025-10-06",
+ MODEL_GPT_REALTIME_MINI = "gpt-realtime-mini",
+ MODEL_GPT_REALTIME_MINI_2025_10_06 = "gpt-realtime-mini-2025-10-06",
+ MODEL_SORA_2 = "sora-2",
MODEL_DAVINCI_002 = "davinci-002",
MODEL_BABBAGE_002 = "babbage-002",
MODEL_GPT_3_5_TURBO_INSTRUCT = "gpt-3.5-turbo-instruct",
@@ -81,6 +81,16 @@ export enum E_OPENAI_MODEL {
MODEL_GPT_5_MINI_2025_08_07 = "gpt-5-mini-2025-08-07",
MODEL_GPT_5_MINI = "gpt-5-mini",
MODEL_GPT_5_NANO_2025_08_07 = "gpt-5-nano-2025-08-07",
+ MODEL_GPT_5_NANO = "gpt-5-nano",
+ MODEL_GPT_AUDIO_2025_08_28 = "gpt-audio-2025-08-28",
+ MODEL_GPT_REALTIME = "gpt-realtime",
+ MODEL_GPT_REALTIME_2025_08_28 = "gpt-realtime-2025-08-28",
+ MODEL_GPT_AUDIO = "gpt-audio",
+ MODEL_GPT_5_CODEX = "gpt-5-codex",
+ MODEL_GPT_IMAGE_1_MINI = "gpt-image-1-mini",
+ MODEL_GPT_5_PRO_2025_10_06 = "gpt-5-pro-2025-10-06",
+ MODEL_GPT_5_PRO = "gpt-5-pro",
+ MODEL_GPT_AUDIO_MINI = "gpt-audio-mini",
MODEL_GPT_3_5_TURBO_16K = "gpt-3.5-turbo-16k",
MODEL_TTS_1 = "tts-1",
MODEL_WHISPER_1 = "whisper-1",
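The regenerated `E_OPENAI_MODEL` cache above only reshuffles existing ids and appends newly published ones (`gpt-5-pro`, `gpt-5-codex`, `gpt-image-1-mini`, `sora-2`, ...). As a minimal sketch of how such an entry might be consumed downstream — the import path, variable names, and helper below are illustrative and not part of this patch:

```typescript
// Illustrative only: the relative path assumes a caller under packages/kbot/src/commands/.
import { E_OPENAI_MODEL } from '../models/cache/openai-models';

// Enum values are the raw OpenAI model ids, so they can be passed straight
// into a request payload or a CLI --model flag.
const textModel: string = E_OPENAI_MODEL.MODEL_GPT_5_PRO;          // "gpt-5-pro"
const imageModel: string = E_OPENAI_MODEL.MODEL_GPT_IMAGE_1_MINI;  // "gpt-image-1-mini"

// Narrow an arbitrary string (e.g. user input) to a known cached model id.
function isKnownOpenAiModel(id: string): id is E_OPENAI_MODEL {
    return (Object.values(E_OPENAI_MODEL) as string[]).includes(id);
}

console.log(isKnownOpenAiModel(textModel), imageModel);
```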
diff --git a/packages/kbot/src/models/cache/openrouter-models-free.ts b/packages/kbot/src/models/cache/openrouter-models-free.ts
index ad786b99..37f6e0f8 100644
--- a/packages/kbot/src/models/cache/openrouter-models-free.ts
+++ b/packages/kbot/src/models/cache/openrouter-models-free.ts
@@ -1,8 +1,8 @@
export enum E_OPENROUTER_MODEL_FREE {
- MODEL_FREE_X_AI_GROK_4_FAST_FREE = "x-ai/grok-4-fast:free",
+ MODEL_FREE_ALIBABA_TONGYI_DEEPRESEARCH_30B_A3B_FREE = "alibaba/tongyi-deepresearch-30b-a3b:free",
+ MODEL_FREE_MEITUAN_LONGCAT_FLASH_CHAT_FREE = "meituan/longcat-flash-chat:free",
MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free",
MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE = "deepseek/deepseek-chat-v3.1:free",
- MODEL_FREE_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free",
MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
MODEL_FREE_Z_AI_GLM_4_5_AIR_FREE = "z-ai/glm-4.5-air:free",
MODEL_FREE_QWEN_QWEN3_CODER_FREE = "qwen/qwen3-coder:free",
@@ -28,7 +28,6 @@ export enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_SHISA_AI_SHISA_V2_LLAMA3_3_70B_FREE = "shisa-ai/shisa-v2-llama3.3-70b:free",
MODEL_FREE_ARLIAI_QWQ_32B_ARLIAI_RPR_V1_FREE = "arliai/qwq-32b-arliai-rpr-v1:free",
MODEL_FREE_AGENTICA_ORG_DEEPCODER_14B_PREVIEW_FREE = "agentica-org/deepcoder-14b-preview:free",
- MODEL_FREE_MOONSHOTAI_KIMI_VL_A3B_THINKING_FREE = "moonshotai/kimi-vl-a3b-thinking:free",
MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free",
MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free",
MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free",
@@ -37,9 +36,7 @@ export enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free",
MODEL_FREE_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free",
MODEL_FREE_GOOGLE_GEMMA_3_27B_IT_FREE = "google/gemma-3-27b-it:free",
- MODEL_FREE_QWEN_QWQ_32B_FREE = "qwen/qwq-32b:free",
MODEL_FREE_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE = "nousresearch/deephermes-3-llama-3-8b-preview:free",
- MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free",
MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-mistral-24b:free",
MODEL_FREE_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE = "qwen/qwen2.5-vl-72b-instruct:free",
MODEL_FREE_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE = "mistralai/mistral-small-24b-instruct-2501:free",
@@ -50,7 +47,6 @@ export enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free",
MODEL_FREE_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free",
MODEL_FREE_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
- MODEL_FREE_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE = "meta-llama/llama-3.1-405b-instruct:free",
MODEL_FREE_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",
MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free",
MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free"
diff --git a/packages/kbot/src/models/cache/openrouter-models.ts b/packages/kbot/src/models/cache/openrouter-models.ts
index 47df4f89..13f2e3ea 100644
--- a/packages/kbot/src/models/cache/openrouter-models.ts
+++ b/packages/kbot/src/models/cache/openrouter-models.ts
@@ -1,20 +1,39 @@
export enum E_OPENROUTER_MODEL {
- MODEL_X_AI_GROK_4_FAST_FREE = "x-ai/grok-4-fast:free",
+ MODEL_INCLUSIONAI_LING_1T = "inclusionai/ling-1t",
+ MODEL_NVIDIA_LLAMA_3_3_NEMOTRON_SUPER_49B_V1_5 = "nvidia/llama-3.3-nemotron-super-49b-v1.5",
+ MODEL_BAIDU_ERNIE_4_5_21B_A3B_THINKING = "baidu/ernie-4.5-21b-a3b-thinking",
+ MODEL_GOOGLE_GEMINI_2_5_FLASH_IMAGE = "google/gemini-2.5-flash-image",
+ MODEL_QWEN_QWEN3_VL_30B_A3B_THINKING = "qwen/qwen3-vl-30b-a3b-thinking",
+ MODEL_QWEN_QWEN3_VL_30B_A3B_INSTRUCT = "qwen/qwen3-vl-30b-a3b-instruct",
+ MODEL_OPENAI_GPT_5_PRO = "openai/gpt-5-pro",
+ MODEL_Z_AI_GLM_4_6 = "z-ai/glm-4.6",
+ MODEL_ANTHROPIC_CLAUDE_SONNET_4_5 = "anthropic/claude-sonnet-4.5",
+ MODEL_DEEPSEEK_DEEPSEEK_V3_2_EXP = "deepseek/deepseek-v3.2-exp",
+ MODEL_THEDRUMMER_CYDONIA_24B_V4_1 = "thedrummer/cydonia-24b-v4.1",
+ MODEL_RELACE_RELACE_APPLY_3 = "relace/relace-apply-3",
+ MODEL_GOOGLE_GEMINI_2_5_FLASH_PREVIEW_09_2025 = "google/gemini-2.5-flash-preview-09-2025",
+ MODEL_GOOGLE_GEMINI_2_5_FLASH_LITE_PREVIEW_09_2025 = "google/gemini-2.5-flash-lite-preview-09-2025",
+ MODEL_QWEN_QWEN3_VL_235B_A22B_THINKING = "qwen/qwen3-vl-235b-a22b-thinking",
+ MODEL_QWEN_QWEN3_VL_235B_A22B_INSTRUCT = "qwen/qwen3-vl-235b-a22b-instruct",
+ MODEL_QWEN_QWEN3_MAX = "qwen/qwen3-max",
+ MODEL_QWEN_QWEN3_CODER_PLUS = "qwen/qwen3-coder-plus",
+ MODEL_OPENAI_GPT_5_CODEX = "openai/gpt-5-codex",
+ MODEL_DEEPSEEK_DEEPSEEK_V3_1_TERMINUS = "deepseek/deepseek-v3.1-terminus",
+ MODEL_X_AI_GROK_4_FAST = "x-ai/grok-4-fast",
+ MODEL_ALIBABA_TONGYI_DEEPRESEARCH_30B_A3B_FREE = "alibaba/tongyi-deepresearch-30b-a3b:free",
MODEL_ALIBABA_TONGYI_DEEPRESEARCH_30B_A3B = "alibaba/tongyi-deepresearch-30b-a3b",
MODEL_QWEN_QWEN3_CODER_FLASH = "qwen/qwen3-coder-flash",
- MODEL_QWEN_QWEN3_CODER_PLUS = "qwen/qwen3-coder-plus",
MODEL_ARCEE_AI_AFM_4_5B = "arcee-ai/afm-4.5b",
MODEL_OPENGVLAB_INTERNVL3_78B = "opengvlab/internvl3-78b",
MODEL_QWEN_QWEN3_NEXT_80B_A3B_THINKING = "qwen/qwen3-next-80b-a3b-thinking",
MODEL_QWEN_QWEN3_NEXT_80B_A3B_INSTRUCT = "qwen/qwen3-next-80b-a3b-instruct",
+ MODEL_MEITUAN_LONGCAT_FLASH_CHAT_FREE = "meituan/longcat-flash-chat:free",
MODEL_MEITUAN_LONGCAT_FLASH_CHAT = "meituan/longcat-flash-chat",
MODEL_QWEN_QWEN_PLUS_2025_07_28 = "qwen/qwen-plus-2025-07-28",
MODEL_QWEN_QWEN_PLUS_2025_07_28_THINKING = "qwen/qwen-plus-2025-07-28:thinking",
MODEL_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free",
MODEL_NVIDIA_NEMOTRON_NANO_9B_V2 = "nvidia/nemotron-nano-9b-v2",
- MODEL_QWEN_QWEN3_MAX = "qwen/qwen3-max",
MODEL_MOONSHOTAI_KIMI_K2_0905 = "moonshotai/kimi-k2-0905",
- MODEL_BYTEDANCE_SEED_OSS_36B_INSTRUCT = "bytedance/seed-oss-36b-instruct",
MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_LLAMA_109B_MOE = "deepcogito/cogito-v2-preview-llama-109b-moe",
MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_DEEPSEEK_671B = "deepcogito/cogito-v2-preview-deepseek-671b",
MODEL_STEPFUN_AI_STEP3 = "stepfun-ai/step3",
@@ -25,7 +44,6 @@ export enum E_OPENROUTER_MODEL {
MODEL_GOOGLE_GEMINI_2_5_FLASH_IMAGE_PREVIEW = "google/gemini-2.5-flash-image-preview",
MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE = "deepseek/deepseek-chat-v3.1:free",
MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_1 = "deepseek/deepseek-chat-v3.1",
- MODEL_DEEPSEEK_DEEPSEEK_V3_1_BASE = "deepseek/deepseek-v3.1-base",
MODEL_OPENAI_GPT_4O_AUDIO_PREVIEW = "openai/gpt-4o-audio-preview",
MODEL_MISTRALAI_MISTRAL_MEDIUM_3_1 = "mistralai/mistral-medium-3.1",
MODEL_BAIDU_ERNIE_4_5_21B_A3B = "baidu/ernie-4.5-21b-a3b",
@@ -37,7 +55,6 @@ export enum E_OPENROUTER_MODEL {
MODEL_OPENAI_GPT_5 = "openai/gpt-5",
MODEL_OPENAI_GPT_5_MINI = "openai/gpt-5-mini",
MODEL_OPENAI_GPT_5_NANO = "openai/gpt-5-nano",
- MODEL_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free",
MODEL_OPENAI_GPT_OSS_120B = "openai/gpt-oss-120b",
MODEL_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
MODEL_OPENAI_GPT_OSS_20B = "openai/gpt-oss-20b",
@@ -67,6 +84,7 @@ export enum E_OPENROUTER_MODEL {
MODEL_TENCENT_HUNYUAN_A13B_INSTRUCT_FREE = "tencent/hunyuan-a13b-instruct:free",
MODEL_TENCENT_HUNYUAN_A13B_INSTRUCT = "tencent/hunyuan-a13b-instruct",
MODEL_TNGTECH_DEEPSEEK_R1T2_CHIMERA_FREE = "tngtech/deepseek-r1t2-chimera:free",
+ MODEL_TNGTECH_DEEPSEEK_R1T2_CHIMERA = "tngtech/deepseek-r1t2-chimera",
MODEL_MORPH_MORPH_V3_LARGE = "morph/morph-v3-large",
MODEL_MORPH_MORPH_V3_FAST = "morph/morph-v3-fast",
MODEL_BAIDU_ERNIE_4_5_VL_424B_A47B = "baidu/ernie-4.5-vl-424b-a47b",
@@ -131,6 +149,7 @@ export enum E_OPENROUTER_MODEL {
MODEL_OPENAI_O4_MINI = "openai/o4-mini",
MODEL_SHISA_AI_SHISA_V2_LLAMA3_3_70B_FREE = "shisa-ai/shisa-v2-llama3.3-70b:free",
MODEL_SHISA_AI_SHISA_V2_LLAMA3_3_70B = "shisa-ai/shisa-v2-llama3.3-70b",
+ MODEL_QWEN_QWEN2_5_CODER_7B_INSTRUCT = "qwen/qwen2.5-coder-7b-instruct",
MODEL_OPENAI_GPT_4_1 = "openai/gpt-4.1",
MODEL_OPENAI_GPT_4_1_MINI = "openai/gpt-4.1-mini",
MODEL_OPENAI_GPT_4_1_NANO = "openai/gpt-4.1-nano",
@@ -140,8 +159,6 @@ export enum E_OPENROUTER_MODEL {
MODEL_ARLIAI_QWQ_32B_ARLIAI_RPR_V1 = "arliai/qwq-32b-arliai-rpr-v1",
MODEL_AGENTICA_ORG_DEEPCODER_14B_PREVIEW_FREE = "agentica-org/deepcoder-14b-preview:free",
MODEL_AGENTICA_ORG_DEEPCODER_14B_PREVIEW = "agentica-org/deepcoder-14b-preview",
- MODEL_MOONSHOTAI_KIMI_VL_A3B_THINKING_FREE = "moonshotai/kimi-vl-a3b-thinking:free",
- MODEL_MOONSHOTAI_KIMI_VL_A3B_THINKING = "moonshotai/kimi-vl-a3b-thinking",
MODEL_X_AI_GROK_3_MINI_BETA = "x-ai/grok-3-mini-beta",
MODEL_X_AI_GROK_3_BETA = "x-ai/grok-3-beta",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1 = "nvidia/llama-3.1-nemotron-ultra-253b-v1",
@@ -167,27 +184,23 @@ export enum E_OPENROUTER_MODEL {
MODEL_OPENAI_GPT_4O_SEARCH_PREVIEW = "openai/gpt-4o-search-preview",
MODEL_GOOGLE_GEMMA_3_27B_IT_FREE = "google/gemma-3-27b-it:free",
MODEL_GOOGLE_GEMMA_3_27B_IT = "google/gemma-3-27b-it",
- MODEL_THEDRUMMER_ANUBIS_PRO_105B_V1 = "thedrummer/anubis-pro-105b-v1",
MODEL_THEDRUMMER_SKYFALL_36B_V2 = "thedrummer/skyfall-36b-v2",
MODEL_MICROSOFT_PHI_4_MULTIMODAL_INSTRUCT = "microsoft/phi-4-multimodal-instruct",
MODEL_PERPLEXITY_SONAR_REASONING_PRO = "perplexity/sonar-reasoning-pro",
MODEL_PERPLEXITY_SONAR_PRO = "perplexity/sonar-pro",
MODEL_PERPLEXITY_SONAR_DEEP_RESEARCH = "perplexity/sonar-deep-research",
- MODEL_QWEN_QWQ_32B_FREE = "qwen/qwq-32b:free",
MODEL_QWEN_QWQ_32B = "qwen/qwq-32b",
MODEL_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE = "nousresearch/deephermes-3-llama-3-8b-preview:free",
+ MODEL_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW = "nousresearch/deephermes-3-llama-3-8b-preview",
MODEL_GOOGLE_GEMINI_2_0_FLASH_LITE_001 = "google/gemini-2.0-flash-lite-001",
MODEL_ANTHROPIC_CLAUDE_3_7_SONNET = "anthropic/claude-3.7-sonnet",
MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_THINKING = "anthropic/claude-3.7-sonnet:thinking",
MODEL_PERPLEXITY_R1_1776 = "perplexity/r1-1776",
MODEL_MISTRALAI_MISTRAL_SABA = "mistralai/mistral-saba",
- MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free",
- MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B = "cognitivecomputations/dolphin3.0-r1-mistral-24b",
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-mistral-24b:free",
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B = "cognitivecomputations/dolphin3.0-mistral-24b",
MODEL_META_LLAMA_LLAMA_GUARD_3_8B = "meta-llama/llama-guard-3-8b",
MODEL_OPENAI_O3_MINI_HIGH = "openai/o3-mini-high",
- MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_8B = "deepseek/deepseek-r1-distill-llama-8b",
MODEL_GOOGLE_GEMINI_2_0_FLASH_001 = "google/gemini-2.0-flash-001",
MODEL_QWEN_QWEN_VL_PLUS = "qwen/qwen-vl-plus",
MODEL_AION_LABS_AION_1_0 = "aion-labs/aion-1.0",
@@ -215,6 +228,7 @@ export enum E_OPENROUTER_MODEL {
MODEL_MINIMAX_MINIMAX_01 = "minimax/minimax-01",
MODEL_MISTRALAI_CODESTRAL_2501 = "mistralai/codestral-2501",
MODEL_MICROSOFT_PHI_4 = "microsoft/phi-4",
+ MODEL_SAO10K_L3_1_70B_HANAMI_X1 = "sao10k/l3.1-70b-hanami-x1",
MODEL_DEEPSEEK_DEEPSEEK_CHAT = "deepseek/deepseek-chat",
MODEL_SAO10K_L3_3_EURYALE_70B = "sao10k/l3.3-euryale-70b",
MODEL_OPENAI_O1 = "openai/o1",
@@ -225,7 +239,6 @@ export enum E_OPENROUTER_MODEL {
MODEL_AMAZON_NOVA_LITE_V1 = "amazon/nova-lite-v1",
MODEL_AMAZON_NOVA_MICRO_V1 = "amazon/nova-micro-v1",
MODEL_AMAZON_NOVA_PRO_V1 = "amazon/nova-pro-v1",
- MODEL_QWEN_QWQ_32B_PREVIEW = "qwen/qwq-32b-preview",
MODEL_OPENAI_GPT_4O_2024_11_20 = "openai/gpt-4o-2024-11-20",
MODEL_MISTRALAI_MISTRAL_LARGE_2411 = "mistralai/mistral-large-2411",
MODEL_MISTRALAI_MISTRAL_LARGE_2407 = "mistralai/mistral-large-2407",
@@ -236,30 +249,29 @@ export enum E_OPENROUTER_MODEL {
MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
- MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b",
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet",
+ MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b",
MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b",
MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b",
MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct",
MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity",
MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi",
- MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b",
MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b",
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct",
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
- MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct",
+ MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct",
MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b",
MODEL_OPENAI_O1_MINI = "openai/o1-mini",
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b",
- MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024",
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
+ MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024",
MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct",
MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b",
MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct",
@@ -269,61 +281,53 @@ export enum E_OPENROUTER_MODEL {
MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b",
MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06",
MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b",
- MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct",
- MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE = "meta-llama/llama-3.1-405b-instruct:free",
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct",
+ MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct",
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",
MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo",
- MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini",
MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18",
+ MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini",
MODEL_GOOGLE_GEMMA_2_27B_IT = "google/gemma-2-27b-it",
MODEL_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free",
MODEL_GOOGLE_GEMMA_2_9B_IT = "google/gemma-2-9b-it",
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620 = "anthropic/claude-3.5-sonnet-20240620",
MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b",
+ MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct",
- MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct",
MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct",
- MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b",
- MODEL_GOOGLE_GEMINI_FLASH_1_5 = "google/gemini-flash-1.5",
MODEL_OPENAI_GPT_4O = "openai/gpt-4o",
MODEL_OPENAI_GPT_4O_EXTENDED = "openai/gpt-4o:extended",
- MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b",
MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13",
+ MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b",
MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct",
MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct",
MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct",
MODEL_MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b",
- MODEL_GOOGLE_GEMINI_PRO_1_5 = "google/gemini-pro-1.5",
MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo",
- MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus",
- MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024",
- MODEL_COHERE_COMMAND = "cohere/command",
- MODEL_COHERE_COMMAND_R = "cohere/command-r",
MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku",
MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus",
- MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024",
MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large",
MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613",
MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview",
- MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small",
MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny",
+ MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small",
+ MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2 = "mistralai/mistral-7b-instruct-v0.2",
MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct",
MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b",
MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b",
MODEL_OPENROUTER_AUTO = "openrouter/auto",
MODEL_OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview",
- MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1",
+ MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct",
MODEL_OPENAI_GPT_3_5_TURBO_16K = "openai/gpt-3.5-turbo-16k",
MODEL_MANCER_WEAVER = "mancer/weaver",
MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b",
MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b",
MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo",
- MODEL_OPENAI_GPT_4 = "openai/gpt-4",
- MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314"
+ MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314",
+ MODEL_OPENAI_GPT_4 = "openai/gpt-4"
}
\ No newline at end of file
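After this update several OpenRouter ids exist in both paid and `:free` form (for example `meituan/longcat-flash-chat` and `meituan/longcat-flash-chat:free`). A small illustrative helper, not part of the patch, showing how that suffix convention could be used to probe for a free variant:

```typescript
// Illustrative only: path mirrors packages/kbot/src/models/cache/openrouter-models.ts.
import { E_OPENROUTER_MODEL } from '../models/cache/openrouter-models';

// Free-tier ids follow the ":free" suffix convention used throughout the enum.
function freeVariantOf(model: E_OPENROUTER_MODEL): string | undefined {
    const candidate = `${model}:free`;
    return (Object.values(E_OPENROUTER_MODEL) as string[]).includes(candidate)
        ? candidate
        : undefined;
}

// "meituan/longcat-flash-chat" -> "meituan/longcat-flash-chat:free"
console.log(freeVariantOf(E_OPENROUTER_MODEL.MODEL_MEITUAN_LONGCAT_FLASH_CHAT));
```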
diff --git a/packages/kbot/tests/scripted/files.sh b/packages/kbot/tests/scripted/files.sh
deleted file mode 100644
index 85e24a66..00000000
--- a/packages/kbot/tests/scripted/files.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-kbot-d "Create a comprehensive Readme.md, in ./tests/scripted/readme.md (installation, usage, examples, etc), with Mermaid diagrams (no braces in node names)" \
- --model=openai/o3-mini \
- --disable=npm,terminal,interact,git,search,web,user,email \
- --include=./src/commands/*.ts \
- --preferences=none \
- --logLevel=2
diff --git a/packages/kbot/tests/scripted/readme.md b/packages/kbot/tests/scripted/readme.md
deleted file mode 100644
index a7ce51e5..00000000
--- a/packages/kbot/tests/scripted/readme.md
+++ /dev/null
@@ -1,147 +0,0 @@
-KBot - Command Line AI Assistant
-================================
-
-KBot is a powerful command line tool that leverages AI models to assist with code generation, file summarization, multi-file processing, and more.
-
-Table of Contents
------------------
-
-- [Installation](#installation)
-- [Usage](#usage)
-- [Examples](#examples)
-- [Architecture](#architecture)
-- [Configuration](#configuration)
-- [Development](#development)
-- [License](#license)
-
-Installation
-------------
-
-1. Ensure you have [Node.js](https://nodejs.org/) installed (version 14+ recommended).
-2. Clone the repository:
-
-```bash
-git clone https://github.com/yourusername/kbot.git
-cd kbot
-```
-
-3. Install dependencies:
-
-```bash
-npm install
-```
-
-4. Build the project (if applicable):
-
-```bash
-npm run build
-```
-
-Usage
------
-
-KBot provides a rich set of commands for interacting with AI tools. Here are some common commands:
-
-- **Run a task:**
-
-```bash
-kbot run --prompt "Summarize the project" --path ./src
-```
-
-- **Fetch available models:**
-
-```bash
-kbot fetch
-```
-
-- **Modify configurations:**
-
-```bash
-kbot init
-```
-
-Examples
---------
-
-Below are some usage examples:
-
-1. **Summarize Project Files**:
-
-```bash
-kbot run --prompt "Give me a summary of the project files" --path ./my_project --include "*.js"
-```
-
-2. **Generate Documentation**:
-
-```bash
-kbot run --prompt "Generate documentation for the codebase" --dst ./docs/README.md
-```
-
-3. **Personalized Assistant**:
-
-Use your own preferences and profiles stored in `./.kbot/preferences.md` for a personalized experience.
-
-Architecture
-------------
-
-The following Mermaid diagram illustrates the high-level architecture of KBot:
-
-```mermaid
-flowchart TD
-Start[Start] --> Config[Load Config]
-Config --> Init[Initialize Client]
-Init --> Process[Process Request]
-Process --> Options[Set Options]
-Options --> Execute[Execute Task]
-Execute --> End[Return Result]
-```
-
-The flow begins with startup, loads the configuration (preferences and settings), initializes the API client, gathers and processes user requests, sets task-specific options, executes the task (running the completion, tools, or assistant mode), and finally returns the result.
-
-Configuration
--------------
-
-KBot uses a configuration file located at `./.kbot/config.json` and a preferences file at `./.kbot/preferences.md` to customize behavior:
-
-- **config.json**: Contains API keys and service configurations for OpenAI, OpenRouter, and more.
-- **preferences.md**: Stores personal information to tailor the assistant responses.
-
-Development
------------
-
-- **Run tests:**
-
-```bash
-npm test
-```
-
-- **Build the project:**
-
-```bash
-npm run build
-```
-
-- **Lint the code:**
-
-```bash
-npm run lint
-```
-
-Mermaid Diagrams
-----------------
-
-Mermaid diagrams are used to visually represent the various components and flows within KBot. Here is another example diagram illustrating the internal processing:
-
-```mermaid
-flowchart LR
-User[User Input] --> CLI[Command Line Parser]
-CLI --> Processor[Task Processor]
-Processor --> API[API Client]
-API --> Collector[Response Collector]
-Collector --> Output[Display Output]
-```
-
-License
--------
-
-This project is licensed under the MIT License.