🎯 MapView v2.0 - Global Deployment Ready

 MAJOR FEATURES:
• Auto-zoom intelligence với smart bounds fitting
• Enhanced 3D GPS markers với pulsing effects
• Professional route display với 6-layer rendering
• Status-based parking icons với availability indicators
• Production-ready build optimizations

🗺️ AUTO-ZOOM FEATURES:
• Smart bounds fitting cho GPS + selected parking
• Adaptive padding (50px) cho visual balance
• Max zoom control (level 16) để tránh quá gần
• Dynamic centering khi không có selection

🎨 ENHANCED VISUALS:
• 3D GPS marker với multi-layer pulse effects
• Advanced parking icons với status colors
• Selection highlighting với animation
• Dimming system cho non-selected items

🛣️ ROUTE SYSTEM:
• OpenRouteService API integration
• Multi-layer route rendering (glow, shadow, main, animated)
• Real-time distance & duration calculation
• Visual route info trong popup

📱 PRODUCTION READY:
• SSR safe với dynamic imports
• Build errors resolved
• Global deployment via Vercel
• Optimized performance

🌍 DEPLOYMENT:
• Vercel: https://whatever-ctk2auuxr-phong12hexdockworks-projects.vercel.app
• Bundle size: 22.8 kB optimized
• Global CDN distribution
• HTTPS enabled

💾 VERSION CONTROL:
• MapView-v2.0.tsx backup created
• MAPVIEW_VERSIONS.md documentation
• Full version history tracking
This commit is contained in:
2025-07-20 19:52:16 +07:00
parent 3203463a6a
commit c65cc97a33
64624 changed files with 7199453 additions and 6462 deletions

View File

@@ -0,0 +1,94 @@
/**
* Ref https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
*/
import type { IGetToken } from "strtok3";
/**
 * ZIP record signatures ('PK..' magic numbers) as little-endian 32-bit values.
 * See APPNOTE sections 4.3.7 (local file header), 4.3.9 (data descriptor),
 * 4.3.12 (central file header) and 4.3.16 (end of central directory).
 */
export declare const Signature: {
    LocalFileHeader: number;
    DataDescriptor: number;
    CentralFileHeader: number;
    EndOfCentralDirectory: number;
};
/** Fields shared by the data-descriptor record and the headers that embed sizes. */
interface IDataDescriptor {
    /** Record signature */
    signature: number;
    /** Size of the file data after compression, in bytes */
    compressedSize: number;
    /** Original (inflated) size of the file data, in bytes */
    uncompressedSize: number;
}
/** Parsed ZIP Local File Header (APPNOTE 4.3.7), fixed 30-byte prefix. */
export interface ILocalFileHeader extends IDataDescriptor {
    /** Minimum ZIP spec version needed to extract this entry */
    minVersion: number;
    /** True when bit 3 of the general-purpose flags is set: sizes/CRC follow in a trailing data descriptor */
    dataDescriptor: boolean;
    /** Compression method; 0 means stored (data passed through), other values are inflated (see inflate()) */
    compressedMethod: number;
    compressedSize: number;
    uncompressedSize: number;
    /** Length of the file name field, in bytes */
    filenameLength: number;
    /** Length of the extra field, in bytes */
    extraFieldLength: number;
    /**
     * File name of the entry.
     * NOTE(review): the token implementations initialize this to `null` and the
     * caller fills it in afterwards — the `string` type is optimistic; confirm
     * consumers never observe the pre-filled value.
     */
    filename: string;
}
/** Token reading the 16-byte data-descriptor record (APPNOTE 4.3.9). */
export declare const DataDescriptor: IGetToken<IDataDescriptor>;
/**
 * First part of the ZIP Local File Header
 * Offset | Bytes| Description
 * -------|------+-------------------------------------------------------------------
 * 0      | 4    | Signature (0x04034b50)
 * 4      | 2    | Minimum version needed to extract
 * 6      | 2    | Bit flag
 * 8      | 2    | Compression method
 * 10     | 2    | File last modification time (MS-DOS format)
 * 12     | 2    | File last modification date (MS-DOS format)
 * 14     | 4    | CRC-32 of uncompressed data
 * 18     | 4    | Compressed size
 * 22     | 4    | Uncompressed size
 * 26     | 2    | File name length (n)
 * 28     | 2    | Extra field length (m)
 * 30     | n    | File name
 * 30 + n | m    | Extra field
 *
 * Only the fixed 30-byte prefix is consumed by this token; the variable-length
 * file name and extra field are read separately by the caller.
 */
export declare const LocalFileHeaderToken: IGetToken<ILocalFileHeader>;
/** Parsed End-Of-Central-Directory record (APPNOTE 4.3.16). */
interface IEndOfCentralDirectoryRecord {
    /** EOCD signature (0x06054b50) */
    signature: number;
    /** Number of this disk */
    nrOfThisDisk: number;
    /** Number of the disk on which the central directory starts */
    nrOfThisDiskWithTheStart: number;
    /** Total number of central-directory entries on this disk */
    nrOfEntriesOnThisDisk: number;
    /** Total number of entries in the central directory (property name is misleading; this is a count, not a size) */
    nrOfEntriesOfSize: number;
    /** Size of the central directory, in bytes */
    sizeOfCd: number;
    /** Offset of the start of the central directory, relative to the starting disk */
    offsetOfStartOfCd: number;
    /** Length of the trailing .ZIP file comment, in bytes */
    zipFileCommentLength: number;
}
/**
 * 4.3.16 End of central directory record:
 * end of central dir signature (0x06054b50) 4 bytes
 * number of this disk 2 bytes
 * number of the disk with the start of the central directory 2 bytes
 * total number of entries in the central directory on this disk 2 bytes
 * total number of entries in the central directory 2 bytes
 * size of the central directory 4 bytes
 * offset of start of central directory with respect to the starting disk number 4 bytes
 * .ZIP file comment length 2 bytes
 * .ZIP file comment (variable size)
 */
export declare const EndOfCentralDirectoryRecordToken: IGetToken<IEndOfCentralDirectoryRecord>;
/** Central-directory file header (APPNOTE 4.3.12): a local header plus comment/offset fields. */
export interface IFileHeader extends ILocalFileHeader {
    /** Length of the file comment, in bytes */
    fileCommentLength: number;
    /** Offset of the corresponding local file header, from the start of the first disk */
    relativeOffsetOfLocalHeader: number;
}
/**
 * File header:
 * central file header signature 4 bytes 0 (0x02014b50)
 * version made by 2 bytes 4
 * version needed to extract 2 bytes 6
 * general purpose bit flag 2 bytes 8
 * compression method 2 bytes 10
 * last mod file time 2 bytes 12
 * last mod file date 2 bytes 14
 * crc-32 4 bytes 16
 * compressed size 4 bytes 20
 * uncompressed size 4 bytes 24
 * file name length 2 bytes 28
 * extra field length 2 bytes 30
 * file comment length 2 bytes 32
 * disk number start 2 bytes 34
 * internal file attributes 2 bytes 36
 * external file attributes 4 bytes 38
 * relative offset of local header 4 bytes 42
 */
export declare const FileHeader: IGetToken<IFileHeader>;
export {};

118
backend/node_modules/@tokenizer/inflate/lib/ZipToken.js generated vendored Normal file
View File

@@ -0,0 +1,118 @@
/**
* Ref https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
*/
import { UINT16_LE, UINT32_LE } from "token-types";
// ZIP record signatures as little-endian 32-bit integers ('PK..' magic numbers).
export const Signature = {
    // 'PK\x03\x04' — local file header (APPNOTE 4.3.7)
    LocalFileHeader: 0x04034b50,
    // 'PK\x07\x08' — data descriptor (APPNOTE 4.3.9)
    DataDescriptor: 0x08074b50,
    // 'PK\x01\x02' — central directory file header (APPNOTE 4.3.12)
    CentralFileHeader: 0x02014b50,
    // 'PK\x05\x06' — end of central directory record (APPNOTE 4.3.16)
    EndOfCentralDirectory: 0x06054b50
};
/**
 * Token reading the 16-byte ZIP data-descriptor record (APPNOTE 4.3.9).
 * Layout: signature (4) | CRC-32 (4) | compressed size (4) | uncompressed size (4).
 * The CRC-32 at offset 4 is not part of IDataDescriptor and is skipped.
 */
export const DataDescriptor = {
    get(array) {
        // Fix: the previous implementation read an unused 16-bit "flags" value at
        // offset 6 — a data descriptor has no flags field; that read straddled the
        // CRC-32 and compressed-size fields and was dead code.
        return {
            signature: UINT32_LE.get(array, 0),
            compressedSize: UINT32_LE.get(array, 8),
            uncompressedSize: UINT32_LE.get(array, 12),
        };
    }, len: 16
};
/**
 * Token reading the fixed 30-byte prefix of a ZIP Local File Header.
 * Offset | Bytes| Description
 * -------|------+-------------------------------------------------------------------
 * 0      | 4    | Signature (0x04034b50)
 * 4      | 2    | Minimum version needed to extract
 * 6      | 2    | Bit flag
 * 8      | 2    | Compression method
 * 10     | 2    | File last modification time (MS-DOS format)
 * 12     | 2    | File last modification date (MS-DOS format)
 * 14     | 4    | CRC-32 of uncompressed data
 * 18     | 4    | Compressed size
 * 22     | 4    | Uncompressed size
 * 26     | 2    | File name length (n)
 * 28     | 2    | Extra field length (m)
 * 30     | n    | File name (read separately by the caller; initialized to null here)
 * 30 + n | m    | Extra field
 */
export const LocalFileHeaderToken = {
    get(array) {
        const generalPurposeFlags = UINT16_LE.get(array, 6);
        return {
            signature: UINT32_LE.get(array, 0),
            minVersion: UINT16_LE.get(array, 4),
            // Bit 3 set => CRC/sizes are deferred to a trailing data descriptor
            dataDescriptor: (generalPurposeFlags & 0x0008) !== 0,
            compressedMethod: UINT16_LE.get(array, 8),
            compressedSize: UINT32_LE.get(array, 18),
            uncompressedSize: UINT32_LE.get(array, 22),
            filenameLength: UINT16_LE.get(array, 26),
            extraFieldLength: UINT16_LE.get(array, 28),
            // Filled in by the caller once filenameLength is known
            filename: null
        };
    }, len: 30
};
/**
 * Token reading the 22-byte End-Of-Central-Directory record (APPNOTE 4.3.16):
 * end of central dir signature (0x06054b50) 4 bytes
 * number of this disk 2 bytes
 * number of the disk with the start of the central directory 2 bytes
 * total number of entries in the central directory on this disk 2 bytes
 * total number of entries in the central directory 2 bytes
 * size of the central directory 4 bytes
 * offset of start of central directory with respect to the starting disk number 4 bytes
 * .ZIP file comment length 2 bytes
 * .ZIP file comment (variable size, not read by this token)
 */
export const EndOfCentralDirectoryRecordToken = {
    get(array) {
        // Small helpers keep the field/offset table below easy to audit.
        const u16 = (offset) => UINT16_LE.get(array, offset);
        const u32 = (offset) => UINT32_LE.get(array, offset);
        return {
            signature: u32(0),
            nrOfThisDisk: u16(4),
            nrOfThisDiskWithTheStart: u16(6),
            nrOfEntriesOnThisDisk: u16(8),
            nrOfEntriesOfSize: u16(10),
            sizeOfCd: u32(12),
            offsetOfStartOfCd: u32(16),
            zipFileCommentLength: u16(20),
        };
    }, len: 22
};
/**
 * Token reading the fixed 46-byte prefix of a central-directory file header
 * (APPNOTE 4.3.12):
 * central file header signature 4 bytes 0 (0x02014b50)
 * version made by 2 bytes 4
 * version needed to extract 2 bytes 6
 * general purpose bit flag 2 bytes 8
 * compression method 2 bytes 10
 * last mod file time 2 bytes 12
 * last mod file date 2 bytes 14
 * crc-32 4 bytes 16
 * compressed size 4 bytes 20
 * uncompressed size 4 bytes 24
 * file name length 2 bytes 28
 * extra field length 2 bytes 30
 * file comment length 2 bytes 32
 * disk number start 2 bytes 34
 * internal file attributes 2 bytes 36
 * external file attributes 4 bytes 38
 * relative offset of local header 4 bytes 42
 */
export const FileHeader = {
    get(array) {
        // In the central header the general-purpose bit flag lives at offset 8
        // (versus 6 in the local header, because of the extra "version made by").
        const generalPurposeFlags = UINT16_LE.get(array, 8);
        const entry = {
            signature: UINT32_LE.get(array, 0),
            minVersion: UINT16_LE.get(array, 6),
            dataDescriptor: (generalPurposeFlags & 0x0008) !== 0,
            compressedMethod: UINT16_LE.get(array, 10),
            compressedSize: UINT32_LE.get(array, 20),
            uncompressedSize: UINT32_LE.get(array, 24),
            filenameLength: UINT16_LE.get(array, 28),
            extraFieldLength: UINT16_LE.get(array, 30),
            fileCommentLength: UINT16_LE.get(array, 32),
            relativeOffsetOfLocalHeader: UINT32_LE.get(array, 42),
            // Filled in by the caller once filenameLength is known
            filename: null
        };
        return entry;
    }, len: 46
};

24
backend/node_modules/@tokenizer/inflate/lib/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,24 @@
import type { ITokenizer } from 'strtok3';
import { type IFileHeader, type ILocalFileHeader } from "./ZipToken.js";
/** Outcome of an InflateFileFilter call: how to handle one ZIP entry. */
export type InflateFileFilterResult = {
    /** Callback that receives the inflated entry data, or `false` to skip this entry */
    handler: InflatedDataHandler | false;
    /** When true, stop iterating after this entry */
    stop?: boolean;
};
/**
 * Per-entry filter: return `{handler: false}` to ignore the file, or supply an
 * `InflatedDataHandler` to receive the extracted data.
 */
export type InflateFileFilter = (file: ILocalFileHeader) => InflateFileFilterResult;
/** Receives the fully decompressed bytes of one ZIP entry. */
export type InflatedDataHandler = (fileData: Uint8Array) => Promise<void>;
/**
 * ZIP reader built on a strtok3 tokenizer. Uses the central directory when the
 * tokenizer supports random access, otherwise falls back to a sequential scan
 * of local file headers.
 */
export declare class ZipHandler {
    private tokenizer;
    // Scratch buffer reused while scanning for signatures
    private syncBuffer;
    constructor(tokenizer: ITokenizer);
    /** True when the stream starts with a local-file-header signature */
    isZip(): Promise<boolean>;
    private peekSignature;
    /** Absolute offset of the end-of-central-directory record, or -1 when not found */
    findEndOfCentralDirectoryLocator(): Promise<number>;
    /** Central-directory entries, or undefined when random access is unavailable */
    readCentralDirectory(): Promise<IFileHeader[] | undefined>;
    /** Iterate over all entries, invoking `fileCb` for each file */
    unzip(fileCb: InflateFileFilter): Promise<void>;
    private iterateOverCentralDirectory;
    private inflate;
    private readLocalFileHeader;
}

206
backend/node_modules/@tokenizer/inflate/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,206 @@
import { StringType, UINT32_LE } from 'token-types';
import { decompressSync } from 'fflate';
import initDebug from 'debug';
import { DataDescriptor, EndOfCentralDirectoryRecordToken, FileHeader, LocalFileHeaderToken, Signature } from "./ZipToken.js";
// Serialize a 32-bit signature value into its 4-byte little-endian form,
// suitable for raw byte-wise scanning of a buffer.
function signatureToArray(signature) {
    const bytes = new Uint8Array(UINT32_LE.len);
    UINT32_LE.put(bytes, 0, signature);
    return bytes;
}
const debug = initDebug('tokenizer:inflate');
// Size of the reusable scan buffer used when an entry's compressed size is unknown.
const syncBufferSize = 256 * 1024;
// Precomputed little-endian byte sequences of the signatures scanned for below.
const ddSignatureArray = signatureToArray(Signature.DataDescriptor);
const eocdSignatureBytes = signatureToArray(Signature.EndOfCentralDirectory);
export class ZipHandler {
constructor(tokenizer) {
this.tokenizer = tokenizer;
this.syncBuffer = new Uint8Array(syncBufferSize);
}
async isZip() {
return await this.peekSignature() === Signature.LocalFileHeader;
}
peekSignature() {
return this.tokenizer.peekToken(UINT32_LE);
}
async findEndOfCentralDirectoryLocator() {
const randomReadTokenizer = this.tokenizer;
const chunkLength = Math.min(16 * 1024, randomReadTokenizer.fileInfo.size);
const buffer = this.syncBuffer.subarray(0, chunkLength);
await this.tokenizer.readBuffer(buffer, { position: randomReadTokenizer.fileInfo.size - chunkLength });
// Search the buffer from end to beginning for EOCD signature
// const signature = 0x06054b50;
for (let i = buffer.length - 4; i >= 0; i--) {
// Compare 4 bytes directly without calling readUInt32LE
if (buffer[i] === eocdSignatureBytes[0] &&
buffer[i + 1] === eocdSignatureBytes[1] &&
buffer[i + 2] === eocdSignatureBytes[2] &&
buffer[i + 3] === eocdSignatureBytes[3]) {
return randomReadTokenizer.fileInfo.size - chunkLength + i;
}
}
return -1;
}
async readCentralDirectory() {
if (!this.tokenizer.supportsRandomAccess()) {
debug('Cannot reading central-directory without random-read support');
return;
}
debug('Reading central-directory...');
const pos = this.tokenizer.position;
const offset = await this.findEndOfCentralDirectoryLocator();
if (offset > 0) {
debug('Central-directory 32-bit signature found');
const eocdHeader = await this.tokenizer.readToken(EndOfCentralDirectoryRecordToken, offset);
const files = [];
this.tokenizer.setPosition(eocdHeader.offsetOfStartOfCd);
for (let n = 0; n < eocdHeader.nrOfEntriesOfSize; ++n) {
const entry = await this.tokenizer.readToken(FileHeader);
if (entry.signature !== Signature.CentralFileHeader) {
throw new Error('Expected Central-File-Header signature');
}
entry.filename = await this.tokenizer.readToken(new StringType(entry.filenameLength, 'utf-8'));
await this.tokenizer.ignore(entry.extraFieldLength);
await this.tokenizer.ignore(entry.fileCommentLength);
files.push(entry);
debug(`Add central-directory file-entry: n=${n + 1}/${files.length}: filename=${files[n].filename}`);
}
this.tokenizer.setPosition(pos);
return files;
}
this.tokenizer.setPosition(pos);
}
async unzip(fileCb) {
const entries = await this.readCentralDirectory();
if (entries) {
// Use Central Directory to iterate over files
return this.iterateOverCentralDirectory(entries, fileCb);
}
// Scan Zip files for local-file-header
let stop = false;
do {
const zipHeader = await this.readLocalFileHeader();
if (!zipHeader)
break;
const next = fileCb(zipHeader);
stop = !!next.stop;
let fileData = undefined;
await this.tokenizer.ignore(zipHeader.extraFieldLength);
if (zipHeader.dataDescriptor && zipHeader.compressedSize === 0) {
const chunks = [];
let len = syncBufferSize;
debug('Compressed-file-size unknown, scanning for next data-descriptor-signature....');
let nextHeaderIndex = -1;
while (nextHeaderIndex < 0 && len === syncBufferSize) {
len = await this.tokenizer.peekBuffer(this.syncBuffer, { mayBeLess: true });
nextHeaderIndex = indexOf(this.syncBuffer.subarray(0, len), ddSignatureArray);
const size = nextHeaderIndex >= 0 ? nextHeaderIndex : len;
if (next.handler) {
const data = new Uint8Array(size);
await this.tokenizer.readBuffer(data);
chunks.push(data);
}
else {
// Move position to the next header if found, skip the whole buffer otherwise
await this.tokenizer.ignore(size);
}
}
debug(`Found data-descriptor-signature at pos=${this.tokenizer.position}`);
if (next.handler) {
await this.inflate(zipHeader, mergeArrays(chunks), next.handler);
}
}
else {
if (next.handler) {
debug(`Reading compressed-file-data: ${zipHeader.compressedSize} bytes`);
fileData = new Uint8Array(zipHeader.compressedSize);
await this.tokenizer.readBuffer(fileData);
await this.inflate(zipHeader, fileData, next.handler);
}
else {
debug(`Ignoring compressed-file-data: ${zipHeader.compressedSize} bytes`);
await this.tokenizer.ignore(zipHeader.compressedSize);
}
}
debug(`Reading data-descriptor at pos=${this.tokenizer.position}`);
if (zipHeader.dataDescriptor) {
// await this.tokenizer.ignore(DataDescriptor.len);
const dataDescriptor = await this.tokenizer.readToken(DataDescriptor);
if (dataDescriptor.signature !== 0x08074b50) {
throw new Error(`Expected data-descriptor-signature at position ${this.tokenizer.position - DataDescriptor.len}`);
}
}
} while (!stop);
}
async iterateOverCentralDirectory(entries, fileCb) {
for (const fileHeader of entries) {
const next = fileCb(fileHeader);
if (next.handler) {
this.tokenizer.setPosition(fileHeader.relativeOffsetOfLocalHeader);
const zipHeader = await this.readLocalFileHeader();
if (zipHeader) {
await this.tokenizer.ignore(zipHeader.extraFieldLength);
const fileData = new Uint8Array(fileHeader.compressedSize);
await this.tokenizer.readBuffer(fileData);
await this.inflate(zipHeader, fileData, next.handler);
}
}
if (next.stop)
break;
}
}
inflate(zipHeader, fileData, cb) {
if (zipHeader.compressedMethod === 0) {
return cb(fileData);
}
debug(`Decompress filename=${zipHeader.filename}, compressed-size=${fileData.length}`);
const uncompressedData = decompressSync(fileData);
return cb(uncompressedData);
}
async readLocalFileHeader() {
const signature = await this.tokenizer.peekToken(UINT32_LE);
if (signature === Signature.LocalFileHeader) {
const header = await this.tokenizer.readToken(LocalFileHeaderToken);
header.filename = await this.tokenizer.readToken(new StringType(header.filenameLength, 'utf-8'));
return header;
}
if (signature === Signature.CentralFileHeader) {
return false;
}
if (signature === 0xE011CFD0) {
throw new Error('Encrypted ZIP');
}
throw new Error('Unexpected signature');
}
}
/**
 * Find the first occurrence of the byte sequence `portion` inside `buffer`.
 * Naive forward scan; an empty `portion` matches at offset 0.
 * @returns the starting offset of the first match, or -1 when absent
 */
function indexOf(buffer, portion) {
    const lastStart = buffer.length - portion.length;
    // A needle longer than the haystack can never match.
    if (lastStart < 0)
        return -1;
    search: for (let start = 0; start <= lastStart; start++) {
        for (let k = 0; k < portion.length; k++) {
            if (buffer[start + k] !== portion[k])
                continue search;
        }
        return start;
    }
    return -1;
}
/**
 * Flatten a list of Uint8Array chunks into one contiguous Uint8Array,
 * preserving chunk order.
 */
function mergeArrays(chunks) {
    let total = 0;
    for (const chunk of chunks)
        total += chunk.length;
    const merged = new Uint8Array(total);
    // reduce threads the running write offset through the chunks.
    chunks.reduce((writeOffset, chunk) => {
        merged.set(chunk, writeOffset);
        return writeOffset + chunk.length;
    }, 0);
    return merged;
}