Laca-City/backend/node_modules/strtok3/lib/core.d.ts
PhongPham c65cc97a33 🎯 MapView v2.0 - Global Deployment Ready
 MAJOR FEATURES:
• Auto-zoom intelligence with smart bounds fitting
• Enhanced 3D GPS markers with pulsing effects
• Professional route display with 6-layer rendering
• Status-based parking icons with availability indicators
• Production-ready build optimizations

🗺️ AUTO-ZOOM FEATURES:
• Smart bounds fitting for GPS + selected parking (see the sketch after this list)
• Adaptive padding (50px) for visual balance
• Max zoom cap (level 16) to avoid zooming in too close
• Dynamic centering when nothing is selected
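A minimal sketch of the auto-zoom behaviour described above, assuming the map is Leaflet (the actual MapView implementation is not shown on this page); the names gpsPosition and selectedParking are illustrative:

import L from 'leaflet';

// Fit the view around the GPS fix and, if present, the selected parking lot.
// Padding and max zoom mirror the values listed above (50px, level 16).
function autoZoom(
  map: L.Map,
  gpsPosition: L.LatLngExpression,
  selectedParking?: L.LatLngExpression
): void {
  if (selectedParking) {
    const bounds = L.latLngBounds([gpsPosition, selectedParking]);
    map.fitBounds(bounds, { padding: [50, 50], maxZoom: 16 });
  } else {
    // No selection: just keep the map centred on the current GPS position.
    map.setView(gpsPosition, map.getZoom());
  }
}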

🎨 ENHANCED VISUALS:
• 3D GPS marker with multi-layer pulse effects
• Advanced parking icons with status colors (see the icon sketch after this list)
• Selection highlighting with animation
• Dimming system for non-selected items
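A rough sketch of the status-based icons and dimming, assuming Leaflet divIcon markers styled by CSS classes; the status values, colours, and class names are illustrative assumptions, not taken from MapView itself:

import L from 'leaflet';

type ParkingStatus = 'available' | 'limited' | 'full';

// Illustrative colour mapping; the real palette may differ.
const statusColors: Record<ParkingStatus, string> = {
  available: '#22c55e',
  limited: '#f59e0b',
  full: '#ef4444',
};

// Build a marker icon whose colour reflects availability; non-selected
// markers get a dimming class, the selected one gets a pulse ring.
function parkingIcon(status: ParkingStatus, selected: boolean): L.DivIcon {
  return L.divIcon({
    className: '', // styling handled by the inline HTML below
    html: `
      <div class="parking-marker ${selected ? 'is-selected' : 'is-dimmed'}"
           style="background:${statusColors[status]}">
        ${selected ? '<span class="pulse-ring"></span>' : ''}
      </div>`,
    iconSize: [28, 28],
    iconAnchor: [14, 28],
  });
}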

🛣️ ROUTE SYSTEM:
• OpenRouteService API integration (see the fetch sketch after this list)
• Multi-layer route rendering (glow, shadow, main, animated)
• Real-time distance & duration calculation
• Visual route info in the popup
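A hedged sketch of the route lookup, based on the public OpenRouteService directions endpoint; the exact request and response handling inside MapView may differ:

interface RouteInfo {
  coordinates: [number, number][]; // [lng, lat] pairs from the GeoJSON geometry
  distanceMeters: number;
  durationSeconds: number;
}

async function fetchRoute(
  apiKey: string,
  start: [number, number], // [lng, lat]
  end: [number, number]
): Promise<RouteInfo> {
  const url =
    'https://api.openrouteservice.org/v2/directions/driving-car' +
    `?api_key=${apiKey}&start=${start.join(',')}&end=${end.join(',')}`;
  const res = await fetch(url);
  if (!res.ok) throw new Error(`Routing request failed: ${res.status}`);
  const geojson = await res.json();
  const feature = geojson.features[0];
  return {
    coordinates: feature.geometry.coordinates,
    distanceMeters: feature.properties.summary.distance,   // metres
    durationSeconds: feature.properties.summary.duration,  // seconds
  };
}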

📱 PRODUCTION READY:
• SSR-safe with dynamic imports (see the sketch after this list)
• Build errors resolved
• Global deployment via Vercel
• Optimized performance
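The SSR-safe import likely follows the standard Next.js pattern below (Next.js itself is an assumption based on the Vercel deployment; the component path is illustrative):

import dynamic from 'next/dynamic';

// Leaflet touches `window` at import time, so the map component must only
// ever render on the client.
const MapView = dynamic(() => import('../components/MapView'), {
  ssr: false,
  loading: () => <p>Loading map…</p>,
});

export default function ParkingPage() {
  return <MapView />;
}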

🌍 DEPLOYMENT:
• Vercel: https://whatever-ctk2auuxr-phong12hexdockworks-projects.vercel.app
• Bundle size: 22.8 kB optimized
• Global CDN distribution
• HTTPS enabled

💾 VERSION CONTROL:
• MapView-v2.0.tsx backup created
• MAPVIEW_VERSIONS.md documentation
• Full version history tracking
2025-07-20 19:52:16 +07:00

41 lines
2.0 KiB
TypeScript

import type { Readable } from 'node:stream';
import { type AnyWebByteStream } from './stream/index.js';
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import { BufferTokenizer } from './BufferTokenizer.js';
import type { ITokenizerOptions } from './types.js';
import { BlobTokenizer } from './BlobTokenizer.js';
export { EndOfStreamError, AbortError, type AnyWebByteStream } from './stream/index.js';
export type { ITokenizer, IRandomAccessTokenizer, IFileInfo, IRandomAccessFileInfo, ITokenizerOptions, IReadChunkOptions, OnClose } from './types.js';
export type { IToken, IGetToken } from '@tokenizer/token';
export { AbstractTokenizer } from './AbstractTokenizer.js';
/**
* Construct ReadStreamTokenizer from given Stream.
* Will set fileSize, if the provided Stream has its .path property set.
* @param stream - Read from Node.js Stream.Readable
* @param options - Tokenizer options
* @returns ReadStreamTokenizer
*/
export declare function fromStream(stream: Readable, options?: ITokenizerOptions): ReadStreamTokenizer;
/**
* Construct ReadStreamTokenizer from given ReadableStream (WebStream API).
* @param webStream - ReadableStream (WebStream API) to read from (must be a byte stream)
* @param options - Tokenizer options
* @returns ReadStreamTokenizer
*/
export declare function fromWebStream(webStream: AnyWebByteStream, options?: ITokenizerOptions): ReadStreamTokenizer;
/**
* Construct BufferTokenizer from given Uint8Array.
* @param uint8Array - Uint8Array to tokenize
* @param options - Tokenizer options
* @returns BufferTokenizer
*/
export declare function fromBuffer(uint8Array: Uint8Array, options?: ITokenizerOptions): BufferTokenizer;
/**
* Construct BlobTokenizer from given Blob.
* @param blob - Blob to tokenize
* @param options - Tokenizer options
* @returns BlobTokenizer
*/
export declare function fromBlob(blob: Blob, options?: ITokenizerOptions): BlobTokenizer;
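
For context, a minimal usage sketch of the API declared above; the strtok3/core subpath import and the 4-byte read are illustrative assumptions, and readBuffer() comes from the ITokenizer interface in types.js, which is not shown here:

import { fromBuffer, EndOfStreamError } from 'strtok3/core';

// Read the first four bytes (e.g. a magic number) using the BufferTokenizer
// returned by fromBuffer().
async function readMagicBytes(data: Uint8Array): Promise<Uint8Array> {
  const tokenizer = fromBuffer(data);
  const magic = new Uint8Array(4);
  try {
    await tokenizer.readBuffer(magic);
  } catch (err) {
    if (err instanceof EndOfStreamError) {
      throw new Error('Input is shorter than 4 bytes');
    }
    throw err;
  }
  return magic;
}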