Commit c8f5379289 (parent 6ac083b114): update
2938 changed files with 326904 additions and 0 deletions

Dockerfile (new file, 17 lines)
@@ -0,0 +1,17 @@
# Use an official Python runtime as a parent image
FROM python:3.12-slim

# Set the working directory in the container
WORKDIR /app

# Copy the current directory contents into the container at /app
COPY . /app

# Install any needed packages specified in requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

# Make port 8000 available to the world outside this container
EXPOSE 8000

# Run the command to start the FastAPI app
CMD ["uvicorn", "backend.main:app", "--host", "0.0.0.0", "--port", "8000", "--log-config", "backend/logging_config.json"]

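A minimal local build-and-run sketch for this image; the tag timetracker-x64-linux is an assumption borrowed from the image name referenced later in docker-compose.yaml, and the build context is expected to contain requirements.txt at its root:

    docker build -t timetracker-x64-linux .
    docker run --rm -p 8000:8000 timetracker-x64-linux
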
LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 hhf technology

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

backend/crud.py (new file, 271 lines)
@@ -0,0 +1,271 @@
from sqlalchemy.orm import Session
from datetime import datetime, timedelta, timezone
from sqlalchemy import func
from backend.models import User, TimeEntry
import backend.schemas as schemas
from backend.exceptions import UserNotFoundException, UserAlreadyClockedInException, NoClockInFoundException, UserAlreadyClockedOutException, AdminUserAlreadyExists, UserAlreadyExists
import logging
from sqlalchemy.exc import IntegrityError
import bcrypt

logger = logging.getLogger("uvicorn")


def _get_period_summary(time_entries):
    grouped_data = {}
    total_hours = 0
    days_worked = 0

    for entry in time_entries:
        # Ensure clock_in and clock_out are timezone-aware
        clock_in_time = entry.clock_in
        if clock_in_time and clock_in_time.tzinfo is None:
            clock_in_time = clock_in_time.replace(tzinfo=timezone.utc)

        clock_out_time = entry.clock_out
        if clock_out_time and clock_out_time.tzinfo is None:
            clock_out_time = clock_out_time.replace(tzinfo=timezone.utc)

        entry_date = clock_in_time.date().strftime("%Y-%m-%d")

        if entry_date not in grouped_data:
            grouped_data[entry_date] = {
                "clock_in": clock_in_time,
                "clock_out": None
            }

        if clock_out_time:
            grouped_data[entry_date]["clock_out"] = clock_out_time
            # Calculate total time for the day
            total_time = clock_out_time - clock_in_time
            grouped_data[entry_date]["total_time"] = str(total_time)
            total_hours += total_time.total_seconds() / 3600
            days_worked += 1
        else:
            grouped_data[entry_date]["clock_out"] = None
            grouped_data[entry_date]["total_time"] = "N/A"

    # Convert grouped data to list format
    result = []
    for date, data in grouped_data.items():
        result.append({
            "date": date,
            "clock_in": data["clock_in"],
            "clock_out": data["clock_out"],
            "total_time": data.get("total_time", "N/A")
        })

    return {
        "total_hours": total_hours,
        "days_worked": days_worked,
        "entries": result
    }


def get_user_by_name(db: Session, name: str):
    return db.query(User).filter(func.lower(User.name) == name.lower()).first()


def create_user(db: Session, user: schemas.UserCreate, is_admin: bool = False, hashed_password: str = None):
    username_lower = user.name.lower()
    logger.info(f"create_user: A request has been made for {username_lower}")

    # Check if the user already exists
    db_user = get_user_by_name(db, name=username_lower)
    if db_user:
        raise UserAlreadyExists(username_lower)

    # Determine the password to store (hashed password for admin)
    password_to_store = hashed_password if hashed_password else None

    new_user = User(name=username_lower, password=password_to_store, is_admin=is_admin)
    db.add(new_user)

    try:
        db.commit()
        db.refresh(new_user)
    except IntegrityError:
        db.rollback()
        raise UserAlreadyExists(username_lower)

    return new_user


def clock_in(db: Session, user: str, time: datetime = None, note: str = None):
    logger.info(f"clock_in: A request has been made for {user}")
    db_user = get_user_by_name(db, user)
    if not db_user:
        raise UserNotFoundException(user)

    # Use provided time or default to current UTC time
    if time is None:
        time = datetime.now(timezone.utc)
    else:
        # Ensure time is timezone-aware and in UTC
        if time.tzinfo is None:
            time = time.replace(tzinfo=timezone.utc)
        else:
            time = time.astimezone(timezone.utc)

    today = time.date()
    time_entry = db.query(TimeEntry).filter(
        TimeEntry.user_id == db_user.id,
        func.date(TimeEntry.clock_in) == today
    ).first()

    if time_entry:
        if time_entry.clock_out:
            raise UserAlreadyClockedOutException(user.capitalize())
        else:
            raise UserAlreadyClockedInException()

    new_entry = TimeEntry(
        user_id=db_user.id,
        clock_in=time,
        clock_in_note=note
    )
    db.add(new_entry)
    db.commit()
    db.refresh(new_entry)
    return new_entry


def clock_out(db: Session, user: str, time: datetime = None, note: str = None):
    db_user = get_user_by_name(db, user)
    if not db_user:
        raise UserNotFoundException(user)

    if time is None:
        time = datetime.now(timezone.utc)
    else:
        if time.tzinfo is None:
            time = time.replace(tzinfo=timezone.utc)
        else:
            time = time.astimezone(timezone.utc)

    today = time.date()
    time_entry = db.query(TimeEntry).filter(
        TimeEntry.user_id == db_user.id,
        func.date(TimeEntry.clock_in) == today
    ).first()

    if not time_entry:
        raise NoClockInFoundException()

    time_entry.clock_out = time
    time_entry.clock_out_note = note
    db.commit()
    db.refresh(time_entry)
    return time_entry


def get_time_for_pay_period(db: Session, user: str):
    db_user = get_user_by_name(db, user)
    if not db_user:
        raise UserNotFoundException(user)

    # Define a reference pay period start date (a known payday)
    reference_pay_period_start = datetime(2023, 1, 6, tzinfo=timezone.utc).date()  # Update this date as needed

    today = datetime.now(timezone.utc).date()
    days_since_reference = (today - reference_pay_period_start).days
    pay_periods_since_reference = days_since_reference // 14
    current_pay_period_start = reference_pay_period_start + timedelta(days=pay_periods_since_reference * 14)
    current_pay_period_end = current_pay_period_start + timedelta(days=13)  # 14 days total

    # Adjust for future dates if necessary
    if today < current_pay_period_start:
        current_pay_period_start -= timedelta(days=14)
        current_pay_period_end -= timedelta(days=14)

    time_entries = db.query(TimeEntry).filter(
        TimeEntry.user_id == db_user.id,
        func.date(TimeEntry.clock_in) >= current_pay_period_start,
        func.date(TimeEntry.clock_in) <= current_pay_period_end
    ).all()

    return _get_period_summary(time_entries)


def get_time_for_month(db: Session, user: str):
    db_user = get_user_by_name(db, user)
    if not db_user:
        raise UserNotFoundException(user)

    today = datetime.now(timezone.utc)
    start_date = today.replace(day=1)

    time_entries = db.query(TimeEntry).filter(
        TimeEntry.user_id == db_user.id,
        TimeEntry.clock_in >= start_date
    ).all()

    return _get_period_summary(time_entries)


def get_time_for_current_week(db: Session, user: str):
    db_user = get_user_by_name(db, user)
    if not db_user:
        raise UserNotFoundException(user)

    today = datetime.now()
    start_of_week = today - timedelta(days=(today.weekday() + 1) % 7)
    end_of_week = start_of_week + timedelta(days=6)

    time_entries = db.query(TimeEntry).filter(
        TimeEntry.user_id == db_user.id,
        TimeEntry.clock_in >= start_of_week,
        TimeEntry.clock_in <= end_of_week
    ).all()

    return _get_period_summary(time_entries)


def get_user_status(db: Session, user: str) -> str:
    db_user = get_user_by_name(db, user)
    if not db_user:
        raise UserNotFoundException(user)

    recent_entry = db.query(TimeEntry).filter(TimeEntry.user_id == db_user.id).order_by(
        TimeEntry.clock_in.desc()).first()

    if recent_entry and recent_entry.clock_out is None:
        return "in"
    else:
        return "out"


def delete_today_entry(db: Session, user: str):
    db_user = get_user_by_name(db, user)
    if not db_user:
        raise UserNotFoundException(user)

    today = datetime.now(timezone.utc).date()
    time_entry = db.query(TimeEntry).filter(
        TimeEntry.user_id == db_user.id,
        func.date(TimeEntry.clock_in) == today
    ).first()

    if time_entry:
        db.delete(time_entry)
        db.commit()
        return True  # Indicate that the entry was deleted
    else:
        raise NoClockInFoundException()  # No entry found for today


def is_clocked_in_today(db: Session, user: str):
    db_user = db.query(User).filter(User.name == user).first()
    if not db_user:
        return False

    today = datetime.now(timezone.utc).date()
    time_entry = db.query(TimeEntry).filter(
        TimeEntry.user_id == db_user.id,
        func.date(TimeEntry.clock_in) == today
    ).first()

    return time_entry is not None


def delete_user_by_name(db: Session, username: str):
    db_user = db.query(User).filter(User.name == username).first()
    if db_user:
        db.delete(db_user)
        db.commit()
        return True
    return False


def get_users(db: Session):
    users = db.query(User).all()
    return users

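get_time_for_pay_period above anchors every 14-day window to a fixed reference payday (2023-01-06). A standalone sketch of the same bucketing arithmetic, included only for illustration; the variable names are hypothetical and not part of the module:

    from datetime import date, timedelta

    reference = date(2023, 1, 6)                      # known payday used as the anchor
    today = date.today()
    periods_elapsed = (today - reference).days // 14  # whole 14-day periods since the anchor
    period_start = reference + timedelta(days=periods_elapsed * 14)
    period_end = period_start + timedelta(days=13)    # inclusive 14-day window
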
backend/database.py (new file, 17 lines)
@@ -0,0 +1,17 @@
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

SQLALCHEMY_DATABASE_URL = "sqlite:///./backend/data/time_tracking.db"

engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False})
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

Base = declarative_base()


def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

backend/exceptions.py (new file, 38 lines)
@@ -0,0 +1,38 @@
import logging

logger = logging.getLogger("uvicorn")


class UserNotFoundException(Exception):
    def __init__(self, username: str):
        self.message = f"User '{username}' not found"
        super().__init__(self.message)
        logger.error(self.message)


class UserAlreadyClockedInException(Exception):
    def __init__(self, message: str = "User already clocked in today"):
        self.message = message
        super().__init__(self.message)
        logger.error(self.message)


class NoClockInFoundException(Exception):
    def __init__(self, message: str = "No clock-in found for today"):
        self.message = message
        super().__init__(self.message)
        logger.error(self.message)


class UserAlreadyClockedOutException(Exception):
    def __init__(self, user: str):
        self.message = f"{user} has already clocked out for the day."
        super().__init__(self.message)


class AdminUserAlreadyExists(Exception):
    def __init__(self, user: str):
        self.message = f"Admin user has already been created, {user} has not been created."
        super().__init__(self.message)


class UserAlreadyExists(Exception):
    def __init__(self, user: str):
        self.message = f"{user} already exists"
        super().__init__(self.message)

backend/logging_config.json (new file, 40 lines)
@@ -0,0 +1,40 @@
{
    "version": 1,
    "disable_existing_loggers": false,
    "formatters": {
        "default": {
            "()": "colorlog.ColoredFormatter",
            "format": "%(log_color)s%(asctime)s - %(levelname)s - %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S",
            "log_colors": {
                "DEBUG": "bold_blue",
                "INFO": "bold_green",
                "WARNING": "bold_yellow",
                "ERROR": "bold_red",
                "CRITICAL": "bold_purple"
            }
        }
    },
    "handlers": {
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout"
        }
    },
    "loggers": {
        "uvicorn": {
            "handlers": ["default"],
            "level": "INFO",
            "propagate": false
        },
        "uvicorn.error": {
            "level": "INFO"
        },
        "uvicorn.access": {
            "handlers": ["default"],
            "level": "INFO",
            "propagate": false
        }
    }
}

backend/main.py (new file, 302 lines)
@@ -0,0 +1,302 @@
from fastapi import FastAPI, Depends, HTTPException, Request, status
from fastapi.staticfiles import StaticFiles
from sqlalchemy.orm import Session
from sqlalchemy.exc import IntegrityError
from fastapi.responses import RedirectResponse, JSONResponse, HTMLResponse, FileResponse
from fastapi.security import HTTPBasic, HTTPBasicCredentials
import secrets
import logging
from backend.database import get_db, engine
from backend.models import Base, TimeEntry
import backend.crud as crud
import backend.schemas as schemas
from datetime import datetime, timezone
from backend.exceptions import UserNotFoundException, UserAlreadyClockedInException, NoClockInFoundException, UserAlreadyClockedOutException, UserAlreadyExists
import bcrypt
import os
from sqlalchemy import func
from dateutil.parser import isoparse


# Basic Auth Setup
security = HTTPBasic()

app = FastAPI()
app.mount("/frontend", StaticFiles(directory="frontend"), name="frontend")
app.mount("/static", StaticFiles(directory="static"), name="static")
Base.metadata.create_all(bind=engine)

# Use Uvicorn's logger
logger = logging.getLogger("uvicorn")

# Retrieve username from environment variables
ENV_USERNAME = os.getenv("ADMIN_USERNAME", "admin")


# Ensure the admin user exists with the hashed password
def ensure_admin_user_exists(db: Session):
    admin_user = crud.get_user_by_name(db, name=ENV_USERNAME)
    if not admin_user:
        # Retrieve password from environment variables
        env_password = os.getenv("ADMIN_PASSWORD")

        # Generate a random password if ADMIN_PASSWORD is not set
        if not env_password:
            env_password = secrets.token_urlsafe(16)  # Random secure password
            logger.warning(f"\n\nGenerated admin password: \n{env_password}\n\n")  # Log this for initial use

        # Hash the password
        hashed_env_password = bcrypt.hashpw(env_password.encode('utf-8'), bcrypt.gensalt())

        # Create the admin user with the hashed password
        admin_data = schemas.UserCreate(name=ENV_USERNAME)
        crud.create_user(db=db, user=admin_data, is_admin=True, hashed_password=hashed_env_password)


@app.on_event("startup")
def on_startup():
    # Ensure the admin user exists on startup
    db = next(get_db())
    ensure_admin_user_exists(db)


def verify_password(plain_password, hashed_password):
    if not hashed_password:
        return False  # If no password is set, it's an automatic failure
    return bcrypt.checkpw(plain_password.encode('utf-8'), hashed_password)


def get_current_username(credentials: HTTPBasicCredentials = Depends(security), db: Session = Depends(get_db)):
    user = crud.get_user_by_name(db, name=credentials.username)

    # Check if the user is the admin and verify the password
    if credentials.username == ENV_USERNAME:
        if not user or not verify_password(credentials.password, user.password):
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Incorrect username or password",
                headers={"WWW-Authenticate": "Basic"},
            )
    else:
        # Non-admin users should not have a password
        if not user:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="User not found",
                headers={"WWW-Authenticate": "Basic"},
            )

    return credentials.username


# Custom Exception Handlers

@app.exception_handler(UserAlreadyExists)
async def user_already_exists_exception_handler(request: Request, exc: UserAlreadyExists):
    logger.error(f"UserAlreadyExists: {exc.message}")
    return JSONResponse(
        status_code=400,
        content={"message": f"User '{exc.message}' already exists."},
    )


@app.exception_handler(UserAlreadyClockedOutException)
async def user_already_clocked_out_exception_handler(request: Request, exc: UserAlreadyClockedOutException):
    logger.error(f"UserAlreadyClockedOutException: {exc.message}")
    return JSONResponse(
        status_code=400,
        content={"message": exc.message},
    )


@app.exception_handler(UserNotFoundException)
async def user_not_found_exception_handler(request: Request, exc: UserNotFoundException):
    logger.error(f"UserNotFoundException: {exc.message}")
    return JSONResponse(
        status_code=404,
        content={"message": exc.message},
    )


@app.exception_handler(UserAlreadyClockedInException)
async def user_already_clocked_in_exception_handler(request: Request, exc: UserAlreadyClockedInException):
    logger.error(f"UserAlreadyClockedInException: {exc.message}")
    return JSONResponse(
        status_code=400,
        content={"message": exc.message},
    )


@app.exception_handler(NoClockInFoundException)
async def no_clock_in_found_exception_handler(request: Request, exc: NoClockInFoundException):
    logger.error(f"NoClockInFoundException: {exc.message}")
    return JSONResponse(
        status_code=400,
        content={"message": exc.message},
    )


@app.exception_handler(Exception)
async def custom_exception_handler(request: Request, exc: Exception):
    logger.error(f"Unhandled Exception: {exc}")
    return JSONResponse(
        status_code=500,
        content={"message": "Internal Server Error"},
    )


@app.get("/")
def read_root():
    with open("frontend/index.html") as f:
        return HTMLResponse(content=f.read())


@app.get("/favicon.ico", include_in_schema=False)
async def favicon():
    return FileResponse("static/favicon.ico")


@app.post("/user/create", response_model=schemas.User)
def create_user(user: schemas.UserCreate, db: Session = Depends(get_db)):
    username_lower = user.name.lower()
    logger.info(f"create_user: A request has been made for {username_lower}")

    try:
        # Try to create the user
        return crud.create_user(db=db, user=user)
    except UserAlreadyExists as e:
        # Handle the specific exception and provide a user-friendly message
        raise HTTPException(status_code=400, detail=str(e))
    except IntegrityError:
        # Handle the race condition
        db.rollback()
        raise HTTPException(status_code=400, detail="User already registered")


@app.post("/time/{user}/in")
def clock_in(user: str, time: str = None, note: str = None, db: Session = Depends(get_db)):
    username_lower = user.lower()
    logger.info(f"clock_in: A request has been made for {username_lower}")

    # Parse the time parameter if provided
    if time:
        try:
            parsed_time = datetime.fromisoformat(time)
            if parsed_time.tzinfo is None:
                parsed_time = parsed_time.replace(tzinfo=timezone.utc)
            else:
                parsed_time = parsed_time.astimezone(timezone.utc)
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid time format. Use ISO 8601 format.")
    else:
        parsed_time = None  # The CRUD function will handle defaulting to current UTC time

    return crud.clock_in(db=db, user=username_lower, time=parsed_time, note=note)


@app.post("/time/{user}/out")
def clock_out(user: str, time: str = None, note: str = None, db: Session = Depends(get_db)):
    username_lower = user.lower()

    if time:
        try:
            parsed_time = datetime.fromisoformat(time)
            if parsed_time.tzinfo is None:
                parsed_time = parsed_time.replace(tzinfo=timezone.utc)
            else:
                parsed_time = parsed_time.astimezone(timezone.utc)
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid time format. Use ISO 8601 format.")
    else:
        parsed_time = None

    return crud.clock_out(db=db, user=username_lower, time=parsed_time, note=note)


@app.get("/time/{user}/recall/payperiod", response_model=schemas.PeriodSummary)
def get_pay_period(user: str, db: Session = Depends(get_db)):
    username_lower = user.lower()
    return crud.get_time_for_pay_period(db=db, user=username_lower)


@app.get("/time/{user}/recall/month", response_model=schemas.PeriodSummary)
def get_time_for_month(user: str, db: Session = Depends(get_db)):
    username_lower = user.lower()
    period_summary = crud.get_time_for_month(db=db, user=username_lower)
    return period_summary


@app.get("/time/{user}/recall/week", response_model=schemas.PeriodSummary)
def get_current_week(user: str, db: Session = Depends(get_db)):
    username_lower = user.lower()
    return crud.get_time_for_current_week(db=db, user=username_lower)


@app.get("/user/status/{user}")
def get_user_status(user: str, db: Session = Depends(get_db)):
    username_lower = user.lower()

    try:
        return crud.get_user_status(db=db, user=username_lower)
    except UserNotFoundException as e:
        raise HTTPException(status_code=404, detail=str(e))


@app.delete("/time/{user}/today", response_model=schemas.Message)
def delete_today_time_entry(user: str, db: Session = Depends(get_db)):
    try:
        crud.delete_today_entry(db, user.lower())
        return {"message": f"Today's clock-in and clock-out times for {user} have been deleted."}
    except UserNotFoundException as e:
        return {"message": str(e)}
    except NoClockInFoundException:
        return {"message": "No clock-in found for today."}


@app.get("/time/{user}/is_clocked_in_today")
def check_clocked_in_today(user: str, db: Session = Depends(get_db)):
    if not crud.is_clocked_in_today(db, user.lower()):
        return {"clocked_in_today": False}
    return {"clocked_in_today": True}


@app.delete("/user/{username}", response_model=schemas.Message)
def delete_user(username: str, db: Session = Depends(get_db), admin_username: str = Depends(get_current_username)):
    # Only the admin should be able to delete a user
    if admin_username != ENV_USERNAME:
        raise HTTPException(status_code=403, detail="Only the admin can delete users")
    username = username.lower()
    success = crud.delete_user_by_name(db=db, username=username)
    if success:
        return {"message": f"User '{username}' has been deleted."}
    else:
        raise HTTPException(status_code=404, detail="User not found")


@app.get("/edit", response_class=HTMLResponse)
def edit_page():
    with open("frontend/edit.html") as f:
        return HTMLResponse(content=f.read())


@app.get("/users")
def get_users(db: Session = Depends(get_db)):
    users = crud.get_users(db=db)
    return [{"name": user.name} for user in users]


@app.post("/time/{user}/edit")
def edit_clock_times(user: str, data: schemas.EditClockTimes, db: Session = Depends(get_db)):
    db_user = crud.get_user_by_name(db, user)
    if not db_user:
        raise HTTPException(status_code=404, detail="User not found")

    try:
        # Parse the date string into a date object
        target_date = datetime.strptime(data.date, "%Y-%m-%d").date()
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid date format. Use YYYY-MM-DD.")

    # Query for the time entry on the specified date
    time_entry = db.query(TimeEntry).filter(
        TimeEntry.user_id == db_user.id,
        func.date(TimeEntry.clock_in) == target_date
    ).first()

    if not time_entry:
        raise HTTPException(status_code=404, detail="No time entry found for this date")

    if data.clock_in_time:
        # Parse the ISO datetime string with timezone information
        time_entry.clock_in = isoparse(data.clock_in_time)
        if time_entry.clock_in.tzinfo is None:
            time_entry.clock_in = time_entry.clock_in.replace(tzinfo=timezone.utc)
        else:
            time_entry.clock_in = time_entry.clock_in.astimezone(timezone.utc)
    if data.clock_out_time:
        time_entry.clock_out = isoparse(data.clock_out_time)
        if time_entry.clock_out.tzinfo is None:
            time_entry.clock_out = time_entry.clock_out.replace(tzinfo=timezone.utc)
        else:
            time_entry.clock_out = time_entry.clock_out.astimezone(timezone.utc)

    db.commit()
    db.refresh(time_entry)

    return {"message": "Clock times updated successfully"}

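A quick smoke test against a running instance, assuming the default port 8000 from the Dockerfile and docker-compose.yaml; the username alice is only an example:

    curl -X POST http://localhost:8000/user/create -H 'Content-Type: application/json' -d '{"name": "alice"}'
    curl -X POST http://localhost:8000/time/alice/in
    curl -X POST http://localhost:8000/time/alice/out
    curl http://localhost:8000/time/alice/recall/week
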
backend/models.py (new file, 22 lines)
@@ -0,0 +1,22 @@
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Boolean
from sqlalchemy.orm import relationship
from backend.database import Base
from datetime import datetime


class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True, index=True)
    name = Column(String, unique=True, index=True)
    password = Column(String)
    is_admin = Column(Boolean, default=False)


class TimeEntry(Base):
    __tablename__ = "time_entries"
    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("users.id"))
    clock_in = Column(DateTime(timezone=True), default=None)
    clock_out = Column(DateTime(timezone=True), default=None)
    clock_in_note = Column(String, nullable=True)
    clock_out_note = Column(String, nullable=True)

    user = relationship("User")

backend/schemas.py (new file, 64 lines)
@@ -0,0 +1,64 @@
from datetime import datetime, timedelta, timezone
from pydantic import BaseModel
from typing import List, Optional


class Config:
    orm_mode = True
    json_encoders = {
        datetime: lambda v: v.astimezone(timezone.utc).isoformat(timespec='microseconds').replace('+00:00', 'Z')
    }


class UserBase(BaseModel):
    name: str


class UserCreate(UserBase):
    name: str
    password: Optional[str] = None  # Make the password optional


class User(BaseModel):
    name: str

    class Config:
        from_attributes = True


class TimeEntry(BaseModel):
    clock_in: datetime | None
    clock_out: datetime | None
    clock_in_note: str | None
    clock_out_note: str | None

    class Config:
        from_attributes = True


class DailyTime(BaseModel):
    date: str
    clock_in: TimeEntry
    clock_out: TimeEntry
    total_time: str  # New field to represent the total time worked that day


class TimeEntryResponse(BaseModel):
    date: str
    clock_in: Optional[datetime]
    clock_out: Optional[datetime]
    total_time: str

    class Config:
        orm_mode = True
        json_encoders = {
            datetime: lambda v: v.isoformat()
        }


class PeriodSummary(BaseModel):
    total_hours: float
    days_worked: int
    entries: List[TimeEntryResponse]


class Message(BaseModel):
    message: str


class EditClockTimes(BaseModel):
    date: str  # Keep as string since we're using it to query
    clock_in_time: Optional[str] = None  # Expect ISO datetime string
    clock_out_time: Optional[str] = None

backend/tests/generate_test_data.py (new file, 83 lines)
@@ -0,0 +1,83 @@
import requests
import random
from datetime import datetime, timedelta

# API configuration
# API_URL = "http://localhost:8000"
API_URL = "https://time-api.smithserver.app"

TEST_USER = "testuser"
# PASSWORD = "password123"  # If the user creation requires a password


# Generate random clock-in and clock-out times
def generate_random_time(date):
    # Random clock-in time between 8 AM and 10 AM
    clock_in_hour = random.randint(8, 10)
    clock_in_minute = random.randint(0, 59)
    clock_in_time = date.replace(hour=clock_in_hour, minute=clock_in_minute, second=0)

    # Random clock-out time between 4 PM and 6 PM
    clock_out_hour = random.randint(16, 18)
    clock_out_minute = random.randint(0, 59)
    clock_out_time = date.replace(hour=clock_out_hour, minute=clock_out_minute, second=0)

    return clock_in_time, clock_out_time


# Create a new user
def create_user():
    url = f"{API_URL}/user/create"
    data = {
        "name": TEST_USER,
        # "password": PASSWORD  # If the API requires a password for user creation
    }
    response = requests.post(url, json=data, verify=False)  # Disable TLS verification
    if response.status_code == 200:
        print(f"User '{TEST_USER}' created successfully.")
    elif response.status_code == 400 and 'already exists' in response.text:
        print(f"User '{TEST_USER}' already exists.")
    else:
        print(f"Error creating user: {response.status_code} - {response.text}")
        return False
    return True


# Clock in and clock out using the API
def clock_in_out(date, clock_in_time, clock_out_time):
    # Format times for the API; both endpoints take the timestamp in their `time` query parameter
    clock_in_url = f"{API_URL}/time/{TEST_USER}/in"
    clock_out_url = f"{API_URL}/time/{TEST_USER}/out"

    # Perform clock-in
    clock_in_response = requests.post(clock_in_url, params={
        "note": f"Clocked in at {clock_in_time.strftime('%Y-%m-%d %H:%M:%S')}",
        "time": clock_in_time.isoformat()
    }, verify=False)  # Disable TLS verification
    if clock_in_response.status_code != 200:
        print(f"Error clocking in for {clock_in_time.date()}: {clock_in_response.status_code} - {clock_in_response.text}")
        return

    # Perform clock-out
    clock_out_response = requests.post(clock_out_url, params={
        "note": f"Clocked out at {clock_out_time.strftime('%Y-%m-%d %H:%M:%S')}",
        "time": clock_out_time.isoformat()
    }, verify=False)  # Disable TLS verification
    if clock_out_response.status_code != 200:
        print(f"Error clocking out for {clock_out_time.date()}: {clock_out_response.status_code} - {clock_out_response.text}")
        return

    print(f"Successfully clocked in and out for {clock_in_time.date()}.")


# Main function to create user and generate data
def main():
    if not create_user():
        return

    # Generate data for the past 45 days
    for i in range(45):
        date = datetime.now() - timedelta(days=i)
        clock_in_time, clock_out_time = generate_random_time(date)

        # Perform clock-in and clock-out operations
        clock_in_out(date, clock_in_time, clock_out_time)


if __name__ == "__main__":
    main()

docker-compose.yaml (new file, 15 lines)
@@ -0,0 +1,15 @@
version: '3.8'

services:
  timetracker_app:
    image: timetracker-x64-linux
    pull_policy: always
    restart: unless-stopped
    ports:
      - "8000:8000" # Map host port 8000 to container port 8000
    environment:
      - APP_ENV=production
    volumes:
      - timetracker:/app/backend/data
volumes:
  timetracker:

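Assuming the timetracker-x64-linux image has been built locally (or is available from a registry, since pull_policy is set to always), the stack can be started with:

    docker compose up -d

Time-entry data persists in the named volume timetracker, mounted at /app/backend/data, which is where database.py points its SQLite file.
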
frontend/dist/styles.css (vendored, new file, 956 lines)
@@ -0,0 +1,956 @@
/* Tailwind CSS */
|
||||
|
||||
/* ! tailwindcss v3.4.11 | MIT License | https://tailwindcss.com */
|
||||
|
||||
/*
|
||||
1. Prevent padding and border from affecting element width. (https://github.com/mozdevs/cssremedy/issues/4)
|
||||
2. Allow adding a border to an element by just adding a border-width. (https://github.com/tailwindcss/tailwindcss/pull/116)
|
||||
*/
|
||||
|
||||
*,
|
||||
::before,
|
||||
::after {
|
||||
box-sizing: border-box;
|
||||
/* 1 */
|
||||
border-width: 0;
|
||||
/* 2 */
|
||||
border-style: solid;
|
||||
/* 2 */
|
||||
border-color: #e5e7eb;
|
||||
/* 2 */
|
||||
}
|
||||
|
||||
::before,
|
||||
::after {
|
||||
--tw-content: '';
|
||||
}
|
||||
|
||||
/*
|
||||
1. Use a consistent sensible line-height in all browsers.
|
||||
2. Prevent adjustments of font size after orientation changes in iOS.
|
||||
3. Use a more readable tab size.
|
||||
4. Use the user's configured `sans` font-family by default.
|
||||
5. Use the user's configured `sans` font-feature-settings by default.
|
||||
6. Use the user's configured `sans` font-variation-settings by default.
|
||||
7. Disable tap highlights on iOS
|
||||
*/
|
||||
|
||||
html,
|
||||
:host {
|
||||
line-height: 1.5;
|
||||
/* 1 */
|
||||
-webkit-text-size-adjust: 100%;
|
||||
/* 2 */
|
||||
-moz-tab-size: 4;
|
||||
/* 3 */
|
||||
-o-tab-size: 4;
|
||||
tab-size: 4;
|
||||
/* 3 */
|
||||
font-family: ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
|
||||
/* 4 */
|
||||
font-feature-settings: normal;
|
||||
/* 5 */
|
||||
font-variation-settings: normal;
|
||||
/* 6 */
|
||||
-webkit-tap-highlight-color: transparent;
|
||||
/* 7 */
|
||||
}
|
||||
|
||||
/*
|
||||
1. Remove the margin in all browsers.
|
||||
2. Inherit line-height from `html` so users can set them as a class directly on the `html` element.
|
||||
*/
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
/* 1 */
|
||||
line-height: inherit;
|
||||
/* 2 */
|
||||
}
|
||||
|
||||
/*
|
||||
1. Add the correct height in Firefox.
|
||||
2. Correct the inheritance of border color in Firefox. (https://bugzilla.mozilla.org/show_bug.cgi?id=190655)
|
||||
3. Ensure horizontal rules are visible by default.
|
||||
*/
|
||||
|
||||
hr {
|
||||
height: 0;
|
||||
/* 1 */
|
||||
color: inherit;
|
||||
/* 2 */
|
||||
border-top-width: 1px;
|
||||
/* 3 */
|
||||
}
|
||||
|
||||
/*
|
||||
Add the correct text decoration in Chrome, Edge, and Safari.
|
||||
*/
|
||||
|
||||
abbr:where([title]) {
|
||||
-webkit-text-decoration: underline dotted;
|
||||
text-decoration: underline dotted;
|
||||
}
|
||||
|
||||
/*
|
||||
Remove the default font size and weight for headings.
|
||||
*/
|
||||
|
||||
h1,
|
||||
h2,
|
||||
h3,
|
||||
h4,
|
||||
h5,
|
||||
h6 {
|
||||
font-size: inherit;
|
||||
font-weight: inherit;
|
||||
}
|
||||
|
||||
/*
|
||||
Reset links to optimize for opt-in styling instead of opt-out.
|
||||
*/
|
||||
|
||||
a {
|
||||
color: inherit;
|
||||
text-decoration: inherit;
|
||||
}
|
||||
|
||||
/*
|
||||
Add the correct font weight in Edge and Safari.
|
||||
*/
|
||||
|
||||
b,
|
||||
strong {
|
||||
font-weight: bolder;
|
||||
}
|
||||
|
||||
/*
|
||||
1. Use the user's configured `mono` font-family by default.
|
||||
2. Use the user's configured `mono` font-feature-settings by default.
|
||||
3. Use the user's configured `mono` font-variation-settings by default.
|
||||
4. Correct the odd `em` font sizing in all browsers.
|
||||
*/
|
||||
|
||||
code,
|
||||
kbd,
|
||||
samp,
|
||||
pre {
|
||||
font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
|
||||
/* 1 */
|
||||
font-feature-settings: normal;
|
||||
/* 2 */
|
||||
font-variation-settings: normal;
|
||||
/* 3 */
|
||||
font-size: 1em;
|
||||
/* 4 */
|
||||
}
|
||||
|
||||
/*
|
||||
Add the correct font size in all browsers.
|
||||
*/
|
||||
|
||||
small {
|
||||
font-size: 80%;
|
||||
}
|
||||
|
||||
/*
|
||||
Prevent `sub` and `sup` elements from affecting the line height in all browsers.
|
||||
*/
|
||||
|
||||
sub,
|
||||
sup {
|
||||
font-size: 75%;
|
||||
line-height: 0;
|
||||
position: relative;
|
||||
vertical-align: baseline;
|
||||
}
|
||||
|
||||
sub {
|
||||
bottom: -0.25em;
|
||||
}
|
||||
|
||||
sup {
|
||||
top: -0.5em;
|
||||
}
|
||||
|
||||
/*
|
||||
1. Remove text indentation from table contents in Chrome and Safari. (https://bugs.chromium.org/p/chromium/issues/detail?id=999088, https://bugs.webkit.org/show_bug.cgi?id=201297)
|
||||
2. Correct table border color inheritance in all Chrome and Safari. (https://bugs.chromium.org/p/chromium/issues/detail?id=935729, https://bugs.webkit.org/show_bug.cgi?id=195016)
|
||||
3. Remove gaps between table borders by default.
|
||||
*/
|
||||
|
||||
table {
|
||||
text-indent: 0;
|
||||
/* 1 */
|
||||
border-color: inherit;
|
||||
/* 2 */
|
||||
border-collapse: collapse;
|
||||
/* 3 */
|
||||
}
|
||||
|
||||
/*
|
||||
1. Change the font styles in all browsers.
|
||||
2. Remove the margin in Firefox and Safari.
|
||||
3. Remove default padding in all browsers.
|
||||
*/
|
||||
|
||||
button,
|
||||
input,
|
||||
optgroup,
|
||||
select,
|
||||
textarea {
|
||||
font-family: inherit;
|
||||
/* 1 */
|
||||
font-feature-settings: inherit;
|
||||
/* 1 */
|
||||
font-variation-settings: inherit;
|
||||
/* 1 */
|
||||
font-size: 100%;
|
||||
/* 1 */
|
||||
font-weight: inherit;
|
||||
/* 1 */
|
||||
line-height: inherit;
|
||||
/* 1 */
|
||||
letter-spacing: inherit;
|
||||
/* 1 */
|
||||
color: inherit;
|
||||
/* 1 */
|
||||
margin: 0;
|
||||
/* 2 */
|
||||
padding: 0;
|
||||
/* 3 */
|
||||
}
|
||||
|
||||
/*
|
||||
Remove the inheritance of text transform in Edge and Firefox.
|
||||
*/
|
||||
|
||||
button,
|
||||
select {
|
||||
text-transform: none;
|
||||
}
|
||||
|
||||
/*
|
||||
1. Correct the inability to style clickable types in iOS and Safari.
|
||||
2. Remove default button styles.
|
||||
*/
|
||||
|
||||
button,
|
||||
input:where([type='button']),
|
||||
input:where([type='reset']),
|
||||
input:where([type='submit']) {
|
||||
-webkit-appearance: button;
|
||||
/* 1 */
|
||||
background-color: transparent;
|
||||
/* 2 */
|
||||
background-image: none;
|
||||
/* 2 */
|
||||
}
|
||||
|
||||
/*
|
||||
Use the modern Firefox focus style for all focusable elements.
|
||||
*/
|
||||
|
||||
:-moz-focusring {
|
||||
outline: auto;
|
||||
}
|
||||
|
||||
/*
|
||||
Remove the additional `:invalid` styles in Firefox. (https://github.com/mozilla/gecko-dev/blob/2f9eacd9d3d995c937b4251a5557d95d494c9be1/layout/style/res/forms.css#L728-L737)
|
||||
*/
|
||||
|
||||
:-moz-ui-invalid {
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
/*
|
||||
Add the correct vertical alignment in Chrome and Firefox.
|
||||
*/
|
||||
|
||||
progress {
|
||||
vertical-align: baseline;
|
||||
}
|
||||
|
||||
/*
|
||||
Correct the cursor style of increment and decrement buttons in Safari.
|
||||
*/
|
||||
|
||||
::-webkit-inner-spin-button,
|
||||
::-webkit-outer-spin-button {
|
||||
height: auto;
|
||||
}
|
||||
|
||||
/*
|
||||
1. Correct the odd appearance in Chrome and Safari.
|
||||
2. Correct the outline style in Safari.
|
||||
*/
|
||||
|
||||
[type='search'] {
|
||||
-webkit-appearance: textfield;
|
||||
/* 1 */
|
||||
outline-offset: -2px;
|
||||
/* 2 */
|
||||
}
|
||||
|
||||
/*
|
||||
Remove the inner padding in Chrome and Safari on macOS.
|
||||
*/
|
||||
|
||||
::-webkit-search-decoration {
|
||||
-webkit-appearance: none;
|
||||
}
|
||||
|
||||
/*
|
||||
1. Correct the inability to style clickable types in iOS and Safari.
|
||||
2. Change font properties to `inherit` in Safari.
|
||||
*/
|
||||
|
||||
::-webkit-file-upload-button {
|
||||
-webkit-appearance: button;
|
||||
/* 1 */
|
||||
font: inherit;
|
||||
/* 2 */
|
||||
}
|
||||
|
||||
/*
|
||||
Add the correct display in Chrome and Safari.
|
||||
*/
|
||||
|
||||
summary {
|
||||
display: list-item;
|
||||
}
|
||||
|
||||
/*
|
||||
Removes the default spacing and border for appropriate elements.
|
||||
*/
|
||||
|
||||
blockquote,
|
||||
dl,
|
||||
dd,
|
||||
h1,
|
||||
h2,
|
||||
h3,
|
||||
h4,
|
||||
h5,
|
||||
h6,
|
||||
hr,
|
||||
figure,
|
||||
p,
|
||||
pre {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
fieldset {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
legend {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
ol,
|
||||
ul,
|
||||
menu {
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
/*
|
||||
Reset default styling for dialogs.
|
||||
*/
|
||||
|
||||
dialog {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
/*
|
||||
Prevent resizing textareas horizontally by default.
|
||||
*/
|
||||
|
||||
textarea {
|
||||
resize: vertical;
|
||||
}
|
||||
|
||||
/*
|
||||
1. Reset the default placeholder opacity in Firefox. (https://github.com/tailwindlabs/tailwindcss/issues/3300)
|
||||
2. Set the default placeholder color to the user's configured gray 400 color.
|
||||
*/
|
||||
|
||||
input::-moz-placeholder, textarea::-moz-placeholder {
|
||||
opacity: 1;
|
||||
/* 1 */
|
||||
color: #9ca3af;
|
||||
/* 2 */
|
||||
}
|
||||
|
||||
input::placeholder,
|
||||
textarea::placeholder {
|
||||
opacity: 1;
|
||||
/* 1 */
|
||||
color: #9ca3af;
|
||||
/* 2 */
|
||||
}
|
||||
|
||||
/*
|
||||
Set the default cursor for buttons.
|
||||
*/
|
||||
|
||||
button,
|
||||
[role="button"] {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
/*
|
||||
Make sure disabled buttons don't get the pointer cursor.
|
||||
*/
|
||||
|
||||
:disabled {
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
/*
|
||||
1. Make replaced elements `display: block` by default. (https://github.com/mozdevs/cssremedy/issues/14)
|
||||
2. Add `vertical-align: middle` to align replaced elements more sensibly by default. (https://github.com/jensimmons/cssremedy/issues/14#issuecomment-634934210)
|
||||
This can trigger a poorly considered lint error in some tools but is included by design.
|
||||
*/
|
||||
|
||||
img,
|
||||
svg,
|
||||
video,
|
||||
canvas,
|
||||
audio,
|
||||
iframe,
|
||||
embed,
|
||||
object {
|
||||
display: block;
|
||||
/* 1 */
|
||||
vertical-align: middle;
|
||||
/* 2 */
|
||||
}
|
||||
|
||||
/*
|
||||
Constrain images and videos to the parent width and preserve their intrinsic aspect ratio. (https://github.com/mozdevs/cssremedy/issues/14)
|
||||
*/
|
||||
|
||||
img,
|
||||
video {
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
/* Make elements with the HTML hidden attribute stay hidden by default */
|
||||
|
||||
[hidden] {
|
||||
display: none;
|
||||
}
|
||||
|
||||
*, ::before, ::after {
|
||||
--tw-border-spacing-x: 0;
|
||||
--tw-border-spacing-y: 0;
|
||||
--tw-translate-x: 0;
|
||||
--tw-translate-y: 0;
|
||||
--tw-rotate: 0;
|
||||
--tw-skew-x: 0;
|
||||
--tw-skew-y: 0;
|
||||
--tw-scale-x: 1;
|
||||
--tw-scale-y: 1;
|
||||
--tw-pan-x: ;
|
||||
--tw-pan-y: ;
|
||||
--tw-pinch-zoom: ;
|
||||
--tw-scroll-snap-strictness: proximity;
|
||||
--tw-gradient-from-position: ;
|
||||
--tw-gradient-via-position: ;
|
||||
--tw-gradient-to-position: ;
|
||||
--tw-ordinal: ;
|
||||
--tw-slashed-zero: ;
|
||||
--tw-numeric-figure: ;
|
||||
--tw-numeric-spacing: ;
|
||||
--tw-numeric-fraction: ;
|
||||
--tw-ring-inset: ;
|
||||
--tw-ring-offset-width: 0px;
|
||||
--tw-ring-offset-color: #fff;
|
||||
--tw-ring-color: rgb(59 130 246 / 0.5);
|
||||
--tw-ring-offset-shadow: 0 0 #0000;
|
||||
--tw-ring-shadow: 0 0 #0000;
|
||||
--tw-shadow: 0 0 #0000;
|
||||
--tw-shadow-colored: 0 0 #0000;
|
||||
--tw-blur: ;
|
||||
--tw-brightness: ;
|
||||
--tw-contrast: ;
|
||||
--tw-grayscale: ;
|
||||
--tw-hue-rotate: ;
|
||||
--tw-invert: ;
|
||||
--tw-saturate: ;
|
||||
--tw-sepia: ;
|
||||
--tw-drop-shadow: ;
|
||||
--tw-backdrop-blur: ;
|
||||
--tw-backdrop-brightness: ;
|
||||
--tw-backdrop-contrast: ;
|
||||
--tw-backdrop-grayscale: ;
|
||||
--tw-backdrop-hue-rotate: ;
|
||||
--tw-backdrop-invert: ;
|
||||
--tw-backdrop-opacity: ;
|
||||
--tw-backdrop-saturate: ;
|
||||
--tw-backdrop-sepia: ;
|
||||
--tw-contain-size: ;
|
||||
--tw-contain-layout: ;
|
||||
--tw-contain-paint: ;
|
||||
--tw-contain-style: ;
|
||||
}
|
||||
|
||||
::backdrop {
|
||||
--tw-border-spacing-x: 0;
|
||||
--tw-border-spacing-y: 0;
|
||||
--tw-translate-x: 0;
|
||||
--tw-translate-y: 0;
|
||||
--tw-rotate: 0;
|
||||
--tw-skew-x: 0;
|
||||
--tw-skew-y: 0;
|
||||
--tw-scale-x: 1;
|
||||
--tw-scale-y: 1;
|
||||
--tw-pan-x: ;
|
||||
--tw-pan-y: ;
|
||||
--tw-pinch-zoom: ;
|
||||
--tw-scroll-snap-strictness: proximity;
|
||||
--tw-gradient-from-position: ;
|
||||
--tw-gradient-via-position: ;
|
||||
--tw-gradient-to-position: ;
|
||||
--tw-ordinal: ;
|
||||
--tw-slashed-zero: ;
|
||||
--tw-numeric-figure: ;
|
||||
--tw-numeric-spacing: ;
|
||||
--tw-numeric-fraction: ;
|
||||
--tw-ring-inset: ;
|
||||
--tw-ring-offset-width: 0px;
|
||||
--tw-ring-offset-color: #fff;
|
||||
--tw-ring-color: rgb(59 130 246 / 0.5);
|
||||
--tw-ring-offset-shadow: 0 0 #0000;
|
||||
--tw-ring-shadow: 0 0 #0000;
|
||||
--tw-shadow: 0 0 #0000;
|
||||
--tw-shadow-colored: 0 0 #0000;
|
||||
--tw-blur: ;
|
||||
--tw-brightness: ;
|
||||
--tw-contrast: ;
|
||||
--tw-grayscale: ;
|
||||
--tw-hue-rotate: ;
|
||||
--tw-invert: ;
|
||||
--tw-saturate: ;
|
||||
--tw-sepia: ;
|
||||
--tw-drop-shadow: ;
|
||||
--tw-backdrop-blur: ;
|
||||
--tw-backdrop-brightness: ;
|
||||
--tw-backdrop-contrast: ;
|
||||
--tw-backdrop-grayscale: ;
|
||||
--tw-backdrop-hue-rotate: ;
|
||||
--tw-backdrop-invert: ;
|
||||
--tw-backdrop-opacity: ;
|
||||
--tw-backdrop-saturate: ;
|
||||
--tw-backdrop-sepia: ;
|
||||
--tw-contain-size: ;
|
||||
--tw-contain-layout: ;
|
||||
--tw-contain-paint: ;
|
||||
--tw-contain-style: ;
|
||||
}
|
||||
|
||||
.container {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
@media (min-width: 640px) {
|
||||
.container {
|
||||
max-width: 640px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 768px) {
|
||||
.container {
|
||||
max-width: 768px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 1024px) {
|
||||
.container {
|
||||
max-width: 1024px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 1280px) {
|
||||
.container {
|
||||
max-width: 1280px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 1536px) {
|
||||
.container {
|
||||
max-width: 1536px;
|
||||
}
|
||||
}
|
||||
|
||||
.visible {
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
.invisible {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
.collapse {
|
||||
visibility: collapse;
|
||||
}
|
||||
|
||||
.fixed {
|
||||
position: fixed;
|
||||
}
|
||||
|
||||
.absolute {
|
||||
position: absolute;
|
||||
}
|
||||
|
||||
.relative {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.inset-0 {
|
||||
inset: 0px;
|
||||
}
|
||||
|
||||
.z-10 {
|
||||
z-index: 10;
|
||||
}
|
||||
|
||||
.z-50 {
|
||||
z-index: 50;
|
||||
}
|
||||
|
||||
.mb-2 {
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.mb-4 {
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.mb-6 {
|
||||
margin-bottom: 1.5rem;
|
||||
}
|
||||
|
||||
.mt-4 {
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
||||
.mt-6 {
|
||||
margin-top: 1.5rem;
|
||||
}
|
||||
|
||||
.block {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.flex {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.table {
|
||||
display: table;
|
||||
}
|
||||
|
||||
.hidden {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.w-96 {
|
||||
width: 24rem;
|
||||
}
|
||||
|
||||
.w-full {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.min-w-full {
|
||||
min-width: 100%;
|
||||
}
|
||||
|
||||
.max-w-lg {
|
||||
max-width: 32rem;
|
||||
}
|
||||
|
||||
.border-collapse {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
.transform {
|
||||
transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));
|
||||
}
|
||||
|
||||
.items-center {
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.justify-center {
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.space-x-4 > :not([hidden]) ~ :not([hidden]) {
|
||||
--tw-space-x-reverse: 0;
|
||||
margin-right: calc(1rem * var(--tw-space-x-reverse));
|
||||
margin-left: calc(1rem * calc(1 - var(--tw-space-x-reverse)));
|
||||
}
|
||||
|
||||
.rounded-lg {
|
||||
border-radius: 0.5rem;
|
||||
}
|
||||
|
||||
.rounded-xl {
|
||||
border-radius: 0.75rem;
|
||||
}
|
||||
|
||||
.border {
|
||||
border-width: 1px;
|
||||
}
|
||||
|
||||
.bg-black {
|
||||
--tw-bg-opacity: 1;
|
||||
background-color: rgb(0 0 0 / var(--tw-bg-opacity));
|
||||
}
|
||||
|
||||
.bg-blue-500 {
|
||||
--tw-bg-opacity: 1;
|
||||
background-color: rgb(59 130 246 / var(--tw-bg-opacity));
|
||||
}
|
||||
|
||||
.bg-green-500 {
|
||||
--tw-bg-opacity: 1;
|
||||
background-color: rgb(34 197 94 / var(--tw-bg-opacity));
|
||||
}
|
||||
|
||||
.bg-red-500 {
|
||||
--tw-bg-opacity: 1;
|
||||
background-color: rgb(239 68 68 / var(--tw-bg-opacity));
|
||||
}
|
||||
|
||||
.bg-white {
|
||||
--tw-bg-opacity: 1;
|
||||
background-color: rgb(255 255 255 / var(--tw-bg-opacity));
|
||||
}
|
||||
|
||||
.bg-opacity-50 {
|
||||
--tw-bg-opacity: 0.5;
|
||||
}
|
||||
|
||||
.p-2 {
|
||||
padding: 0.5rem;
|
||||
}
|
||||
|
||||
.p-6 {
|
||||
padding: 1.5rem;
|
||||
}
|
||||
|
||||
.p-8 {
|
||||
padding: 2rem;
|
||||
}
|
||||
|
||||
.px-4 {
|
||||
padding-left: 1rem;
|
||||
padding-right: 1rem;
|
||||
}
|
||||
|
||||
.px-6 {
|
||||
padding-left: 1.5rem;
|
||||
padding-right: 1.5rem;
|
||||
}
|
||||
|
||||
.py-2 {
|
||||
padding-top: 0.5rem;
|
||||
padding-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.py-4 {
|
||||
padding-top: 1rem;
|
||||
padding-bottom: 1rem;
|
||||
}
|
||||
|
||||
.text-left {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.text-center {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.text-3xl {
|
||||
font-size: 1.875rem;
|
||||
line-height: 2.25rem;
|
||||
}
|
||||
|
||||
.text-4xl {
|
||||
font-size: 2.25rem;
|
||||
line-height: 2.5rem;
|
||||
}
|
||||
|
||||
.text-lg {
|
||||
font-size: 1.125rem;
|
||||
line-height: 1.75rem;
|
||||
}
|
||||
|
||||
.text-sm {
|
||||
font-size: 0.875rem;
|
||||
line-height: 1.25rem;
|
||||
}
|
||||
|
||||
.text-xl {
|
||||
font-size: 1.25rem;
|
||||
line-height: 1.75rem;
|
||||
}
|
||||
|
||||
.font-bold {
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.font-semibold {
|
||||
  font-weight: 600;
}

.text-gray-900 {
  --tw-text-opacity: 1;
  color: rgb(17 24 39 / var(--tw-text-opacity));
}

.text-white {
  --tw-text-opacity: 1;
  color: rgb(255 255 255 / var(--tw-text-opacity));
}

.shadow {
  --tw-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1);
  --tw-shadow-colored: 0 1px 3px 0 var(--tw-shadow-color), 0 1px 2px -1px var(--tw-shadow-color);
  box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow);
}

.shadow-2xl {
  --tw-shadow: 0 25px 50px -12px rgb(0 0 0 / 0.25);
  --tw-shadow-colored: 0 25px 50px -12px var(--tw-shadow-color);
  box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow);
}

.shadow-lg {
  --tw-shadow: 0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1);
  --tw-shadow-colored: 0 10px 15px -3px var(--tw-shadow-color), 0 4px 6px -4px var(--tw-shadow-color);
  box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow);
}

.transition {
  transition-property: color, background-color, border-color, text-decoration-color, fill, stroke, opacity, box-shadow, transform, filter, -webkit-backdrop-filter;
  transition-property: color, background-color, border-color, text-decoration-color, fill, stroke, opacity, box-shadow, transform, filter, backdrop-filter;
  transition-property: color, background-color, border-color, text-decoration-color, fill, stroke, opacity, box-shadow, transform, filter, backdrop-filter, -webkit-backdrop-filter;
  transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);
  transition-duration: 150ms;
}

.transition-colors {
  transition-property: color, background-color, border-color, text-decoration-color, fill, stroke;
  transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);
  transition-duration: 150ms;
}

/* Custom CSS for Moon and Sun Toggle */

:root {
  --darkbg: #251D29;
  --darkt: #FFD1F7;
  --lightbg: #f0f0f0; /* Light background color */
  --lightt: #333;     /* Darker text color for light mode */
  /* Adjusted sizes for the smaller toggle */
  --toggleHeight: 5em;
  --toggleWidth: 10em;
  --toggleBtnRadius: 4em;
  --bgColor--night: #423966;
  --toggleBtn-bgColor--night: var(--bgColor--night);
  --mooncolor: #D9FBFF;
  --bgColor--day: #ffc107; /* Distinct yellow for day mode */
  --toggleBtn-bgColor--day: var(--bgColor--day);
}

body {
  transition: all 0.2s ease-in-out;
  background: var(--darkbg);
  color: var(--darkt);
}

.light {
  background: var(--lightbg);
  color: var(--lightt);
}

.tdnn {
  position: absolute;
  top: 1em;
  right: 1em;
  font-size: 50%;
  height: var(--toggleHeight);
  width: var(--toggleWidth);
  border-radius: var(--toggleHeight);
  transition: all 500ms ease-in-out;
  background: var(--bgColor--night);
  display: flex;
  align-items: center;
  justify-content: center;
  cursor: pointer;
}

.day {
  background: var(--bgColor--day);
}

.moon {
  position: relative;
  display: block;
  border-radius: 50%;
  transition: all 400ms ease-in-out;
  width: var(--toggleBtnRadius);
  height: var(--toggleBtnRadius);
  background: var(--bgColor--night);
  box-shadow:
    1.5em 1.25em 0 0em var(--mooncolor) inset,
    rgba(255, 255, 255, 0.1) 0em -3.5em 0 -2.25em,
    rgba(255, 255, 255, 0.1) 1.5em 3.5em 0 -2.25em,
    rgba(255, 255, 255, 0.1) 1em 6.5em 0 -2em,
    rgba(255, 255, 255, 0.1) 3em 1em 0 -2.05em,
    rgba(255, 255, 255, 0.1) 4em 4em 0 -2.25em,
    rgba(255, 255, 255, 0.1) 3em 6.5em 0 -2.25em,
    rgba(255, 255, 255, 0.1) -2em 3.5em 0 -2.25em,
    rgba(255, 255, 255, 0.1) -0.5em 5em 0 -2.25em;
}

.sun {
  transform: translate(3em, 0) rotate(0deg);
  width: 4em;
  height: 4em;
  background: #fff;
  box-shadow:
    1.5em 1.5em 0 2.5em #fff inset,
    0 -2.5em 0 -1.35em #fff,
    1.75em -1.75em 0 -1.5em #fff,
    2.5em 0 0 -1.35em #fff,
    1.75em 1.75em 0 -1.5em #fff,
    0 2.5em 0 -1.35em #fff,
    -1.75em 1.75em 0 -1.5em #fff,
    -2.5em 0 0 -1.35em #fff,
    -1.75em -1.75em 0 -1.5em #fff;
}

@media (prefers-color-scheme: dark) {
  .dark\:bg-gray-800 {
    --tw-bg-opacity: 1;
    background-color: rgb(31 41 55 / var(--tw-bg-opacity));
  }

  .dark\:text-gray-200 {
    --tw-text-opacity: 1;
    color: rgb(229 231 235 / var(--tw-text-opacity));
  }
}
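The rules above only define the two visual states of the moon/sun toggle (.tdnn/.day for the track, .moon/.sun for the knob); the markup and click handler that switch between them are not part of this stylesheet. A minimal sketch of that wiring, assuming a hypothetical <div class="tdnn"><div class="moon"></div></div> element (not taken from this commit):

// Sketch only (assumed markup): flips the classes the CSS above styles.
const toggle = document.querySelector('.tdnn');
const knob = toggle.querySelector('.moon');

toggle.addEventListener('click', () => {
  toggle.classList.toggle('day');          // track: night background <-> day background
  knob.classList.toggle('sun');            // knob: moon with craters <-> sun with rays
  document.body.classList.toggle('light'); // applies the .light body colors defined above
});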
420  frontend/edit.html  Normal file
@@ -0,0 +1,420 @@
<!DOCTYPE html>
|
||||
<html lang="en" x-data="appData()" x-init="init()">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Edit Time Entry</title>
|
||||
<link href="https://fonts.googleapis.com/css2?family=JetBrains+Mono:wght@400;500;700&display=swap" rel="stylesheet">
|
||||
<link href="/frontend/dist/styles.css" rel="stylesheet">
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.4/css/all.min.css">
|
||||
<script src="https://unpkg.com/alpinejs" defer></script>
|
||||
|
||||
<style>
|
||||
/* Default (Light Mode) Styles */
|
||||
body {
|
||||
background-color: #f5f5f5;
|
||||
color: #333;
|
||||
transition: background-color 0.3s, color 0.3s;
|
||||
text-align: center;
|
||||
height: 100vh;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
margin: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
/* Time Tracker Card */
|
||||
.time-tracker {
|
||||
background-color: #fff;
|
||||
color: #333;
|
||||
transition: background-color 0.3s, color 0.3s;
|
||||
max-width: 800px;
|
||||
z-index: 10;
|
||||
position: relative;
|
||||
margin: 0 auto;
|
||||
padding: 20px;
|
||||
border-radius: 15px;
|
||||
box-shadow: 0 4px 10px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
/* Dark Mode Styles */
|
||||
body.dark-mode {
|
||||
background-color: #1a202c;
|
||||
color: #cbd5e0;
|
||||
}
|
||||
|
||||
.dark-mode .time-tracker {
|
||||
background-color: #2d3748;
|
||||
color: #cbd5e0;
|
||||
}
|
||||
|
||||
/* SVG Background */
|
||||
.svg-background {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
z-index: 1;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
/* Buttons */
|
||||
.btn {
|
||||
display: inline-block;
|
||||
padding: 0.5em 1.5em;
|
||||
font-size: 1em;
|
||||
font-weight: 500;
|
||||
color: white;
|
||||
background-color: #4CAF50;
|
||||
border: none;
|
||||
border-radius: 0.375em;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.3s ease, transform 0.1s ease;
|
||||
}
|
||||
|
||||
.btn:hover {
|
||||
background-color: #45a049;
|
||||
transform: scale(1.05);
|
||||
}
|
||||
|
||||
.btn-danger {
|
||||
background-color: #e53e3e;
|
||||
}
|
||||
|
||||
.btn-danger:hover {
|
||||
background-color: #c53030;
|
||||
}
|
||||
|
||||
/* Dark Mode for Buttons */
|
||||
.dark-mode .btn {
|
||||
background-color: #3182ce;
|
||||
}
|
||||
|
||||
.dark-mode .btn:hover {
|
||||
background-color: #2b6cb0;
|
||||
}
|
||||
|
||||
/* DateTime Input Styles */
|
||||
input[type="datetime-local"] {
|
||||
width: 100%;
|
||||
padding: 0.5em;
|
||||
margin: 0.5em 0;
|
||||
border: 1px solid #ccc;
|
||||
border-radius: 0.375em;
|
||||
font-size: 1em;
|
||||
transition: border-color 0.2s ease;
|
||||
}
|
||||
|
||||
input[type="datetime-local"]:focus {
|
||||
outline: none;
|
||||
border-color: #3182ce;
|
||||
}
|
||||
|
||||
.dark-mode input[type="datetime-local"] {
|
||||
background-color: #2d3748;
|
||||
color: #cbd5e0;
|
||||
border-color: #4a5568;
|
||||
}
|
||||
|
||||
.dark-mode input[type="datetime-local"]:focus {
|
||||
border-color: #63b3ed;
|
||||
}
|
||||
|
||||
/* Modal */
|
||||
#edit-modal {
|
||||
display: none;
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: rgba(0, 0, 0, 0.5);
|
||||
z-index: 1000;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
#edit-modal .modal-content {
|
||||
background-color: white;
|
||||
padding: 20px;
|
||||
margin: auto;
|
||||
width: 50%;
|
||||
border-radius: 10px;
|
||||
box-shadow: 0 5px 15px rgba(0, 0, 0, 0.3);
|
||||
}
|
||||
|
||||
/* Dark Mode for Modal */
|
||||
.dark-mode #edit-modal .modal-content {
|
||||
background-color: #2d3748;
|
||||
color: #cbd5e0;
|
||||
}
|
||||
|
||||
/* Table Styles */
|
||||
table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
margin-top: 1em;
|
||||
}
|
||||
|
||||
thead th {
|
||||
background-color: #e2e8f0;
|
||||
color: #2d3748;
|
||||
padding: 0.75em;
|
||||
border-bottom: 2px solid #cbd5e0;
|
||||
}
|
||||
|
||||
tbody tr:nth-child(even) {
|
||||
background-color: #f7fafc;
|
||||
}
|
||||
|
||||
tbody tr:nth-child(odd) {
|
||||
background-color: #ffffff;
|
||||
}
|
||||
|
||||
tbody td {
|
||||
padding: 0.75em;
|
||||
border-bottom: 1px solid #cbd5e0;
|
||||
}
|
||||
|
||||
body.dark-mode table {
|
||||
background-color: #2d3748;
|
||||
color: #cbd5e0;
|
||||
}
|
||||
|
||||
body.dark-mode thead th {
|
||||
background-color: #4a5568;
|
||||
color: #edf2f7;
|
||||
}
|
||||
|
||||
body.dark-mode tbody tr:nth-child(even) {
|
||||
background-color: #2c3440;
|
||||
}
|
||||
|
||||
body.dark-mode tbody tr:nth-child(odd) {
|
||||
background-color: #1f2733;
|
||||
}
|
||||
|
||||
body.dark-mode tbody td {
|
||||
border-bottom: 1px solid #4a5568;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body :class="{ 'dark-mode': isDarkMode }">
|
||||
<!-- SVG Wave Animation Background -->
|
||||
<svg class="svg-background" version="1.1" xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="100%" height="100%" viewBox="0 0 1600 900" preserveAspectRatio="xMidYMax slice">
|
||||
<defs>
|
||||
<linearGradient id="bg">
|
||||
<stop offset="0%" style="stop-color:rgba(130, 158, 249, 0.06)"></stop>
|
||||
<stop offset="50%" style="stop-color:rgba(76, 190, 255, 0.6)"></stop>
|
||||
<stop offset="100%" style="stop-color:rgba(115, 209, 72, 0.2)"></stop>
|
||||
</linearGradient>
|
||||
<path id="wave" fill="url(#bg)" d="M-363.852,452.589c0,0,236.988-91.997,505.475,0s371.981,88.998,575.971,0s293.985-89.278,505.474,5.859s493.475,98.368,716.963-4.995v560.106H-363.852V452.589z" />
|
||||
</defs>
|
||||
<g>
|
||||
<use xlink:href='#wave' opacity=".3">
|
||||
<animateTransform attributeName="transform" attributeType="XML" type="translate" dur="10s" calcMode="spline"
|
||||
values="270 230; -334 180; 270 230" keyTimes="0; .5; 1" keySplines="0.42, 0, 0.58, 1.0;0.42, 0, 0.58, 1.0" repeatCount="indefinite" />
|
||||
</use>
|
||||
<use xlink:href='#wave' opacity=".6">
|
||||
<animateTransform attributeName="transform" attributeType="XML" type="translate" dur="8s" calcMode="spline"
|
||||
values="-270 230;243 220;-270 230" keyTimes="0; .6; 1" keySplines="0.42, 0, 0.58, 1.0;0.42, 0, 0.58, 1.0" repeatCount="indefinite" />
|
||||
</use>
|
||||
<use xlink:href='#wave' opacity=".9">
|
||||
<animateTransform attributeName="transform" attributeType="XML" type="translate" dur="6s" calcMode="spline"
|
||||
values="0 230;-140 200;0 230" keyTimes="0; .4; 1" keySplines="0.42, 0, 0.58, 1.0;0.42, 0, 0.58, 1.0" repeatCount="indefinite" />
|
||||
</use>
|
||||
</g>
|
||||
</svg>
|
||||
|
||||
<!-- Main Content -->
|
||||
<div class="relative max-w-lg w-full time-tracker shadow-2xl rounded-xl p-8 z-10">
|
||||
<h1 class="text-4xl font-bold mb-6 text-center">Edit Time Entry</h1>
|
||||
|
||||
<!-- Go Home Button -->
|
||||
<div class="text-center mb-4">
|
||||
<a href="/" class="btn">Go Home</a>
|
||||
</div>
|
||||
|
||||
<!-- Form to select a user -->
|
||||
<form id="user-form">
|
||||
<label for="user">Select User:</label>
|
||||
<select id="user" name="user" required></select>
|
||||
<button class="btn" type="submit">Fetch Times</button>
|
||||
</form>
|
||||
|
||||
<!-- Table to display clock-in/clock-out times -->
|
||||
<table id="time-table" class="time-entry-table mt-6" style="display:none;">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Date</th>
|
||||
<th>Clock In</th>
|
||||
<th>Clock Out</th>
|
||||
<th>Edit</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody></tbody>
|
||||
</table>
|
||||
|
||||
<!-- Edit Form Modal -->
|
||||
<div id="edit-modal" class="hidden flex">
|
||||
<div class="modal-content">
|
||||
<h2>Edit Clock In/Out Times for <span id="edit-date"></span></h2>
|
||||
<form id="edit-form">
|
||||
<input type="hidden" id="entry-id">
|
||||
<label for="clock_in">Clock In:</label>
|
||||
<input type="datetime-local" id="edit-clock-in" name="clock_in">
|
||||
<br>
|
||||
<label for="clock_out">Clock Out:</label>
|
||||
<input type="datetime-local" id="edit-clock-out" name="clock_out">
|
||||
<br>
|
||||
<button class="btn" type="submit">Save Changes</button>
|
||||
<button class="btn btn-danger" type="button" onclick="closeModal()">Cancel</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
function appData() {
|
||||
return {
|
||||
isDarkMode: false,
|
||||
|
||||
// Initialize the app
|
||||
init() {
|
||||
this.isDarkMode = localStorage.getItem('darkMode') === 'true';
|
||||
if (this.isDarkMode) {
|
||||
document.body.classList.add('dark-mode');
|
||||
} else {
|
||||
document.body.classList.remove('dark-mode');
|
||||
}
|
||||
},
|
||||
|
||||
// Toggle between dark mode and light mode
|
||||
toggleDarkMode() {
|
||||
this.isDarkMode = !this.isDarkMode;
|
||||
localStorage.setItem('darkMode', this.isDarkMode);
|
||||
document.body.classList.toggle('dark-mode', this.isDarkMode);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function fetchUsers() {
|
||||
const response = await fetch('/users');
|
||||
const users = await response.json();
|
||||
const userSelect = document.getElementById('user');
|
||||
users.forEach(user => {
|
||||
const option = document.createElement('option');
|
||||
option.value = user.name;
|
||||
option.text = user.name;
|
||||
userSelect.add(option);
|
||||
});
|
||||
}
|
||||
|
||||
function formatTime(datetimeString) {
|
||||
if (!datetimeString) return 'N/A';
|
||||
const date = new Date(datetimeString);
|
||||
const options = {
|
||||
hour: 'numeric',
|
||||
minute: 'numeric',
|
||||
hour12: true, // Set to false for 24-hour format
|
||||
};
|
||||
return date.toLocaleTimeString([], options);
|
||||
}
|
||||
|
||||
async function fetchTimeEntries(user) {
|
||||
const response = await fetch(`/time/${user}/recall/month`);
|
||||
const data = await response.json();
|
||||
const tableBody = document.getElementById('time-table').getElementsByTagName('tbody')[0];
|
||||
tableBody.innerHTML = ''; // Clear existing table data
|
||||
data.entries.forEach(entry => {
|
||||
const row = tableBody.insertRow();
|
||||
row.insertCell(0).innerText = entry.date;
|
||||
row.insertCell(1).innerText = entry.clock_in ? formatTime(entry.clock_in) : 'N/A';
|
||||
row.insertCell(2).innerText = entry.clock_out ? formatTime(entry.clock_out) : 'N/A';
|
||||
const editCell = row.insertCell(3);
|
||||
const editButton = document.createElement('button');
|
||||
editButton.innerText = 'Edit';
|
||||
editButton.className = 'btn';
|
||||
editButton.onclick = () => openEditModal(user, entry.date, entry.clock_in, entry.clock_out);
|
||||
editCell.appendChild(editButton);
|
||||
});
|
||||
document.getElementById('time-table').style.display = 'table'; // Show table
|
||||
}
|
||||
|
||||
function toLocalDatetimeInputValue(date) {
|
||||
if (!date) return '';
|
||||
const dt = new Date(date);
|
||||
const year = dt.getFullYear();
|
||||
const month = ('0' + (dt.getMonth() + 1)).slice(-2);
|
||||
const day = ('0' + dt.getDate()).slice(-2);
|
||||
const hours = ('0' + dt.getHours()).slice(-2);
|
||||
const minutes = ('0' + dt.getMinutes()).slice(-2);
|
||||
return `${year}-${month}-${day}T${hours}:${minutes}`;
|
||||
}
|
||||
|
||||
function localDatetimeToUTC(datetimeLocal) {
|
||||
if (!datetimeLocal) return null;
|
||||
const localDate = new Date(datetimeLocal);
|
||||
return localDate.toISOString();
|
||||
}
|
||||
|
||||
function openEditModal(user, date, clockIn, clockOut) {
|
||||
document.getElementById('edit-date').innerText = date;
|
||||
document.getElementById('entry-id').value = date;
|
||||
document.getElementById('edit-clock-in').value = clockIn ? toLocalDatetimeInputValue(clockIn) : '';
|
||||
document.getElementById('edit-clock-out').value = clockOut ? toLocalDatetimeInputValue(clockOut) : '';
|
||||
document.getElementById('edit-modal').style.display = 'flex';
|
||||
}
|
||||
|
||||
function closeModal() {
|
||||
document.getElementById('edit-modal').style.display = 'none';
|
||||
}
|
||||
|
||||
async function submitForm(event) {
|
||||
event.preventDefault();
|
||||
const user = document.getElementById('user').value;
|
||||
const date = document.getElementById('entry-id').value;
|
||||
const clockIn = document.getElementById('edit-clock-in').value;
|
||||
const clockOut = document.getElementById('edit-clock-out').value;
|
||||
|
||||
const clockInUTC = clockIn ? localDatetimeToUTC(clockIn) : null;
|
||||
const clockOutUTC = clockOut ? localDatetimeToUTC(clockOut) : null;
|
||||
|
||||
const response = await fetch(`/time/${user}/edit`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
date: date,
|
||||
clock_in_time: clockInUTC,
|
||||
clock_out_time: clockOutUTC,
|
||||
}),
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
alert('Times updated successfully');
|
||||
closeModal();
|
||||
fetchTimeEntries(user);
|
||||
} else {
|
||||
const error = await response.json();
|
||||
alert('Error updating times: ' + error.message);
|
||||
}
|
||||
}
|
||||
|
||||
document.getElementById('user-form').addEventListener('submit', function(event) {
|
||||
event.preventDefault();
|
||||
const user = document.getElementById('user').value;
|
||||
fetchTimeEntries(user);
|
||||
});
|
||||
|
||||
document.getElementById('edit-form').addEventListener('submit', submitForm);
|
||||
|
||||
fetchUsers();
|
||||
</script>
|
||||
</body>
|
||||
</html>
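A note on the edit form above: the datetime-local inputs are filled from the stored UTC timestamps via toLocalDatetimeInputValue() (local wall-clock, no zone suffix) and converted back with localDatetimeToUTC() (ISO-8601 UTC) before being POSTed to /time/{user}/edit. A small worked example of that round trip, assuming for illustration a browser in a UTC-5 timezone:

// Illustrative only: results shown for a UTC-5 browser.
const stored = '2024-06-01T14:30:00Z';      // timestamp as returned by the backend
toLocalDatetimeInputValue(stored);          // -> '2024-06-01T09:30'  (value placed in the input)
localDatetimeToUTC('2024-06-01T09:30');     // -> '2024-06-01T14:30:00.000Z'  (value POSTed back)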
856  frontend/index.html  Normal file
@@ -0,0 +1,856 @@
<!DOCTYPE html>
|
||||
<html lang="en" x-data="appData()" x-init="init()">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Time Tracker</title>
|
||||
<link href="https://fonts.googleapis.com/css2?family=JetBrains+Mono:wght@400;500;700&display=swap" rel="stylesheet">
|
||||
<link href="/frontend/dist/styles.css" rel="stylesheet">
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.4/css/all.min.css">
|
||||
<script src="https://unpkg.com/alpinejs" defer></script>
|
||||
|
||||
<!-- Icons -->
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="/static/apple-touch-icon.png">
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/static/favicon-32x32.png">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="/static/favicon-16x16.png">
|
||||
<link rel="manifest" href="/static/site.webmanifest">
|
||||
<link rel="mask-icon" href="/static/safari-pinned-tab.svg" color="#5bbad5">
|
||||
<link rel="shortcut icon" href="/static/favicon.ico">
|
||||
<meta name="msapplication-TileColor" content="#2b5797">
|
||||
<meta name="theme-color" content="#ffffff">
|
||||
|
||||
<!-- Custom Styles for Light and Dark Modes and Clock Icon -->
|
||||
<style>
|
||||
/* Default (Light Mode) Styles */
|
||||
body {
|
||||
background-color: #f5f5f5;
|
||||
color: #333;
|
||||
transition: background-color 0.3s, color 0.3s;
|
||||
text-align: center; /* Center all elements */
|
||||
padding: 80px 0; /* Add padding to top and bottom */
|
||||
margin: 0; /* Remove default margin */
|
||||
}
|
||||
|
||||
/* Time Tracker Card */
|
||||
.time-tracker {
|
||||
background-color: #fff;
|
||||
color: #333;
|
||||
transition: background-color 0.3s, color 0.3s;
|
||||
max-width: 800px; /* Increased max-width for wider card */
|
||||
z-index: 10; /* Ensures the tracker is on top of the SVG animation */
|
||||
position: relative; /* Ensure this element is positioned above the SVG */
|
||||
margin: 0 auto; /* Center the time tracker card */
|
||||
}
|
||||
|
||||
/* Dark Mode Styles */
|
||||
body.dark-mode {
|
||||
background-color: #1a202c;
|
||||
color: #cbd5e0;
|
||||
}
|
||||
|
||||
.dark-mode .time-tracker {
|
||||
background-color: #2d3748;
|
||||
color: #cbd5e0;
|
||||
}
|
||||
|
||||
/* Dark Mode SVG Colors */
|
||||
body.dark-mode .svg-background stop:nth-child(1) {
|
||||
stop-color: rgba(20, 30, 48, 0.06); /* Darker gradient start */
|
||||
}
|
||||
body.dark-mode .svg-background stop:nth-child(2) {
|
||||
stop-color: rgba(36, 59, 85, 0.6); /* Darker gradient middle */
|
||||
}
|
||||
body.dark-mode .svg-background stop:nth-child(3) {
|
||||
stop-color: rgba(52, 73, 94, 0.2); /* Darker gradient end */
|
||||
}
|
||||
|
||||
/* SVG Background */
|
||||
.svg-background {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
z-index: 1; /* Ensure SVG is below the main content */
|
||||
pointer-events: none; /* Makes sure SVG doesn't interfere with interactions */
|
||||
}
|
||||
|
||||
/* Button Styles */
|
||||
.toggle-button {
|
||||
margin-bottom: 1em;
|
||||
padding: 0.5em 1em;
|
||||
background-color: #edf2f7;
|
||||
color: #2d3748;
|
||||
border-radius: 0.375em;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.3s, color 0.3s;
|
||||
}
|
||||
|
||||
/* Clock In/Out Button */
|
||||
.clock-in-out-button {
|
||||
max-width: 300px; /* Set a max-width to limit the button size */
|
||||
width: 100%; /* Ensure it takes up to 100% of its container's width */
|
||||
padding: 0.75em 1.5em; /* Adjust padding for better appearance */
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
margin: 0 auto; /* Center the button */
|
||||
margin-bottom: 2em; /* Add bottom margin for more spacing */
|
||||
transition: background-color 0.3s, color 0.3s;
|
||||
font-size: 1.2em; /* Adjust font size if needed */
|
||||
}
|
||||
|
||||
/* Dark Mode Button Styles */
|
||||
.dark-mode .toggle-button {
|
||||
background-color: #4a5568;
|
||||
color: #edf2f7;
|
||||
}
|
||||
|
||||
/* Clock Icon Styles */
|
||||
.clock {
|
||||
position: relative;
|
||||
transform: scale(1.5); /* Adjust size of the clock */
|
||||
border-radius: 50%;
|
||||
border: 2px solid;
|
||||
width: 30px; /* Adjusted size */
|
||||
height: 30px; /* Adjusted size */
|
||||
margin-left: 10px; /* Add some space between text and icon */
|
||||
}
|
||||
|
||||
.clock:after, .clock:before {
|
||||
position: absolute;
|
||||
width: 0px;
|
||||
display: block;
|
||||
border-left: 2px solid #000;
|
||||
content: '';
|
||||
left: 50%; /* Center horizontally */
|
||||
top: 50%; /* Center vertically */
|
||||
transform-origin: center bottom; /* Origin should be at the bottom center */
|
||||
transform: translateX(-50%) translateY(-100%); /* Move to center */
|
||||
}
|
||||
|
||||
/* Minute Hand */
|
||||
.clock:after {
|
||||
height: 12px; /* Adjusted hand length */
|
||||
animation: minute-dial 1s linear infinite; /* Add animation */
|
||||
}
|
||||
|
||||
/* Hour Hand */
|
||||
.clock:before {
|
||||
height: 8px; /* Adjusted hand length */
|
||||
animation: hour-dial 60s linear infinite; /* Add animation */
|
||||
}
|
||||
|
||||
/* Keyframes for minute hand */
|
||||
@keyframes minute-dial {
|
||||
0% { transform: translateX(-50%) translateY(-100%) rotate(0deg); }
|
||||
100% { transform: translateX(-50%) translateY(-100%) rotate(360deg); }
|
||||
}
|
||||
|
||||
/* Keyframes for hour hand */
|
||||
@keyframes hour-dial {
|
||||
0% { transform: translateX(-50%) translateY(-100%) rotate(0deg); }
|
||||
100% { transform: translateX(-50%) translateY(-100%) rotate(360deg); }
|
||||
}
|
||||
|
||||
/* Table Styles */
|
||||
table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
margin-top: 1em;
|
||||
}
|
||||
|
||||
thead th {
|
||||
background-color: #e2e8f0; /* Light grey background for headers */
|
||||
color: #2d3748; /* Darker text for headers */
|
||||
padding: 0.75em;
|
||||
border-bottom: 2px solid #cbd5e0; /* Border for table headers */
|
||||
}
|
||||
|
||||
tbody tr:nth-child(even) {
|
||||
background-color: #f7fafc; /* Light grey for even rows */
|
||||
}
|
||||
|
||||
tbody tr:nth-child(odd) {
|
||||
background-color: #ffffff; /* White for odd rows */
|
||||
}
|
||||
|
||||
tbody td {
|
||||
padding: 0.75em;
|
||||
border-bottom: 1px solid #cbd5e0; /* Light grey border for cells */
|
||||
}
|
||||
|
||||
/* Dark Mode Table Styles */
|
||||
body.dark-mode table {
|
||||
background-color: #2d3748; /* Dark background for the table */
|
||||
color: #cbd5e0; /* Light text color */
|
||||
}
|
||||
|
||||
body.dark-mode thead th {
|
||||
background-color: #4a5568; /* Darker background for headers */
|
||||
color: #edf2f7; /* Light text for headers */
|
||||
}
|
||||
|
||||
body.dark-mode tbody tr:nth-child(even) {
|
||||
background-color: #2c3440; /* Slightly lighter dark grey for even rows */
|
||||
}
|
||||
|
||||
body.dark-mode tbody tr:nth-child(odd) {
|
||||
background-color: #1f2733; /* Slightly darker grey for odd rows */
|
||||
}
|
||||
|
||||
body.dark-mode tbody td {
|
||||
border-bottom: 1px solid #4a5568; /* Darker grey border for cells */
|
||||
}
|
||||
|
||||
/* Add borders to improve contrast */
|
||||
table, th, td {
|
||||
border: 1px solid #cbd5e0; /* Border color for light mode */
|
||||
}
|
||||
|
||||
body.dark-mode table, body.dark-mode th, body.dark-mode td {
|
||||
border: 1px solid #4a5568; /* Border color for dark mode */
|
||||
}
|
||||
|
||||
/* Status Message Styles */
|
||||
.status-message {
|
||||
position: fixed;
|
||||
top: 20px;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
background-color: rgba(50, 50, 50, 0.8); /* Default soft dark background */
|
||||
color: #fff;
|
||||
padding: 0.75em 1.5em;
|
||||
border-radius: 5px;
|
||||
opacity: 0; /* Start as invisible */
|
||||
z-index: 9999; /* Ensure it's on top */
|
||||
transition: opacity 0.5s ease, transform 0.5s ease;
|
||||
}
|
||||
|
||||
/* Soft Colors for Success and Error */
|
||||
.status-success {
|
||||
background-color: rgba(72, 187, 120, 0.9); /* Soft green for success */
|
||||
}
|
||||
|
||||
.status-error {
|
||||
background-color: rgba(229, 83, 83, 0.9); /* Soft red for error */
|
||||
}
|
||||
|
||||
/* Show the message */
|
||||
.show {
|
||||
opacity: 1; /* Fade-in effect */
|
||||
transform: translateX(-50%) translateY(0); /* Move to visible position */
|
||||
}
|
||||
|
||||
/* Hide the message */
|
||||
.hidden {
|
||||
opacity: 0; /* Fade-out effect */
|
||||
transform: translateX(-50%) translateY(-10px); /* Slightly move up when hidden */
|
||||
}
|
||||
|
||||
/* Style for the switch user icon */
|
||||
.switch-user-icon {
|
||||
position: absolute;
|
||||
top: 10px;
|
||||
right: 10px;
|
||||
cursor: pointer;
|
||||
font-size: 1.2em; /* Adjust size as needed */
|
||||
color: #333; /* Default color */
|
||||
transition: color 0.3s;
|
||||
}
|
||||
|
||||
/* Change color on hover */
|
||||
.switch-user-icon:hover {
|
||||
color: #007bff; /* Change to a different color on hover */
|
||||
}
|
||||
|
||||
/* Dark mode styles */
|
||||
body.dark-mode .switch-user-icon {
|
||||
color: #cbd5e0; /* Color for dark mode */
|
||||
}
|
||||
|
||||
body.dark-mode .switch-user-icon:hover {
|
||||
color: #4a90e2; /* Hover color for dark mode */
|
||||
}
|
||||
/* Light mode footer styles */
|
||||
footer {
|
||||
text-align: center; /* Center the text */
|
||||
padding: 20px; /* Add some padding */
|
||||
font-weight: 300; /* Set the font weight to light (300) */
|
||||
color: #777; /* Light gray text color */
|
||||
background-color: #f9f9f9; /* Light background color */
|
||||
position: fixed; /* Make the footer stick to the bottom */
|
||||
bottom: 0;
|
||||
width: 100%; /* Make the footer span the full width */
|
||||
transition: background-color 0.3s, color 0.3s; /* Smooth transition */
|
||||
}
|
||||
|
||||
/* Dark mode footer styles */
|
||||
body.dark-mode footer {
|
||||
background-color: #1a202c; /* Dark background color */
|
||||
color: #cbd5e0; /* Light gray text color for dark mode */
|
||||
}
|
||||
/* Modal Box Styling */
|
||||
#userPromptModal {
|
||||
position: fixed; /* Fixed positioning */
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: rgba(0, 0, 0, 0.75); /* Dim background */
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
opacity: 0; /* Start as invisible */
|
||||
pointer-events: none; /* Prevent interaction when hidden */
|
||||
transition: opacity 0.3s ease; /* Smooth transition */
|
||||
z-index: 9999; /* Ensure it is on top of other content */
|
||||
}
|
||||
#userPromptModal > div {
|
||||
background-color: #ffffff; /* Default background color */
|
||||
color: #333; /* Default text color */
|
||||
padding: 1.5em;
|
||||
border-radius: 0.5em;
|
||||
box-shadow: 0 5px 15px rgba(0, 0, 0, 0.3); /* Box shadow for a raised effect */
|
||||
transform: scale(1);
|
||||
opacity: 1;
|
||||
transition: transform 0.3s ease-out, opacity 0.3s ease-out;
|
||||
text-align: center; /* Center content inside the modal */
|
||||
}
|
||||
/* Visible state for modal */
|
||||
#userPromptModal.visible {
|
||||
opacity: 1;
|
||||
pointer-events: auto; /* Allow interaction */
|
||||
}
|
||||
/* Center the buttons */
|
||||
#userPromptModal .modal-buttons {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
gap: 10px; /* Space between the buttons */
|
||||
margin-top: 1em; /* Space above the buttons */
|
||||
}
|
||||
|
||||
#userPromptModal .modal-buttons button {
|
||||
padding: 0.5em 1.5em;
|
||||
border-radius: 0.5em;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.3s ease;
|
||||
}
|
||||
/* Hidden state for the modal box */
|
||||
#userPromptModal.hidden > div {
|
||||
transform: scale(0.95);
|
||||
opacity: 0;
|
||||
transition: transform 0.2s ease-in, opacity 0.2s ease-in;
|
||||
}
|
||||
/* Dark Mode Support for Modal */
|
||||
body.dark-mode #userPromptModal > div {
|
||||
background-color: #2d3748; /* Dark mode background */
|
||||
color: #cbd5e0; /* Dark mode text color */
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body :class="{ 'dark-mode': isDarkMode }">
|
||||
|
||||
<!-- Custom Modal for User Creation Prompt -->
|
||||
<div id="userPromptModal" class="fixed inset-0 flex items-center justify-center bg-black bg-opacity-50 hidden z-50">
|
||||
<div class="bg-white dark:bg-gray-800 text-gray-900 dark:text-gray-200 rounded-lg shadow-lg p-6 w-96">
|
||||
<h2 class="text-xl font-bold mb-4">User Not Found</h2>
|
||||
<p id="modalMessage" class="mb-4">User "<span id="usernameSpan"></span>" does not exist. Do you want to create a new user?</p>
|
||||
<!-- Modal Buttons -->
|
||||
<div class="modal-buttons">
|
||||
<button @click="toggleModal(false)" class="bg-red-500 text-white py-2 px-4 rounded-lg">Cancel</button>
|
||||
<button @click="createUser()" class="bg-blue-500 text-white py-2 px-4 rounded-lg">Create User</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- SVG Wave Animation Background -->
|
||||
<svg class="svg-background" version="1.1" xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="100%" height="100%" viewBox="0 0 1600 900" preserveAspectRatio="xMidYMax slice">
|
||||
<defs>
|
||||
<linearGradient id="bg">
|
||||
<stop offset="0%" style="stop-color:rgba(130, 158, 249, 0.06)"></stop>
|
||||
<stop offset="50%" style="stop-color:rgba(76, 190, 255, 0.6)"></stop>
|
||||
<stop offset="100%" style="stop-color:rgba(115, 209, 72, 0.2)"></stop>
|
||||
</linearGradient>
|
||||
<!-- Higher Waves Path -->
|
||||
<path id="wave" fill="url(#bg)" d="M-363.852,452.589c0,0,236.988-91.997,505.475,0s371.981,88.998,575.971,0s293.985-89.278,505.474,5.859s493.475,98.368,716.963-4.995v560.106H-363.852V452.589z" />
|
||||
</defs>
|
||||
<g>
|
||||
<use xlink:href='#wave' opacity=".3">
|
||||
<animateTransform
|
||||
attributeName="transform"
|
||||
attributeType="XML"
|
||||
type="translate"
|
||||
dur="10s"
|
||||
calcMode="spline"
|
||||
values="270 230; -334 180; 270 230"
|
||||
keyTimes="0; .5; 1"
|
||||
keySplines="0.42, 0, 0.58, 1.0;0.42, 0, 0.58, 1.0"
|
||||
repeatCount="indefinite" />
|
||||
</use>
|
||||
<use xlink:href='#wave' opacity=".6">
|
||||
<animateTransform
|
||||
attributeName="transform"
|
||||
attributeType="XML"
|
||||
type="translate"
|
||||
dur="8s"
|
||||
calcMode="spline"
|
||||
values="-270 230;243 220;-270 230"
|
||||
keyTimes="0; .6; 1"
|
||||
keySplines="0.42, 0, 0.58, 1.0;0.42, 0, 0.58, 1.0"
|
||||
repeatCount="indefinite" />
|
||||
</use>
|
||||
<use xlink:href='#wave' opacity=".9">
|
||||
<animateTransform
|
||||
attributeName="transform"
|
||||
attributeType="XML"
|
||||
type="translate"
|
||||
dur="6s"
|
||||
calcMode="spline"
|
||||
values="0 230;-140 200;0 230"
|
||||
keyTimes="0; .4; 1"
|
||||
keySplines="0.42, 0, 0.58, 1.0;0.42, 0, 0.58, 1.0"
|
||||
repeatCount="indefinite" />
|
||||
</use>
|
||||
</g>
|
||||
</svg>
|
||||
|
||||
<!-- Status Message Container -->
|
||||
<div id="statusMessage" class="status-message hidden"></div>
|
||||
|
||||
<!-- Main Content -->
|
||||
<div class="relative max-w-lg w-full time-tracker shadow-2xl rounded-xl p-8 z-10">
|
||||
<!-- Switch User Icon -->
|
||||
<div class="switch-user-icon" @click="clearUser">
|
||||
<i class="fas fa-user"></i>
|
||||
</div>
|
||||
<h1 class="text-4xl font-bold mb-6 text-center">Time Tracker</h1>
|
||||
|
||||
<!-- Toggle Dark Mode Button -->
|
||||
<div class="flex justify-center mb-4">
|
||||
<button @click="toggleDarkMode()" class="toggle-button">Toggle Dark Mode</button>
|
||||
</div>
|
||||
|
||||
<!-- User Input and Clock In/Out Logic -->
|
||||
<div class="text-center">
|
||||
<template x-if="!userName">
|
||||
<div>
|
||||
<label for="userNameInput" class="block text-sm mb-2">Enter your name:</label>
|
||||
<input id="userNameInput" type="text" class="w-full border p-2 mb-4" x-model="userNameInput" placeholder="Your name" style="width: 250px;">
|
||||
<div class="mt-4">
|
||||
<button @click="saveUser()" class="bg-blue-500 text-white py-2 px-4 rounded-lg">Save Name</button>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<!-- Show Clock In/Out and Query Buttons if User Exists -->
|
||||
<template x-if="userName">
|
||||
<div>
|
||||
<p class="mb-4">Welcome, <span x-text="capitalizeFirstLetter(userName)"></span>!</p>
|
||||
<!-- Show current status -->
|
||||
<p class="mb-4" x-show="userStatus !== null">Status: <span x-text="capitalizeFirstLetter(userStatus)"></span></p>
|
||||
|
||||
<!-- Clock In/Out Button -->
|
||||
<button @click="clockInOut()" :disabled="isProcessingClockInOut"
|
||||
class="clock-in-out-button flex items-center justify-center w-full py-4 px-6 text-lg font-semibold rounded-lg transition-colors mb-6"
|
||||
:class="isClockedIn ? 'bg-red-500 text-white' : 'bg-green-500 text-white'">
|
||||
<span x-text="isClockedIn ? 'Clock Out' : 'Clock In'"></span>
|
||||
<div class="clock" x-show="isClockedIn"></div>
|
||||
</button>
|
||||
|
||||
<!-- Delete Today's Entry Button, visible only if the user has clocked in today -->
|
||||
<template x-if="isClockedInToday">
|
||||
<div class="mb-4">
|
||||
<button @click="deleteTodayEntry()" class="bg-red-500 text-white py-2 px-4 rounded-lg">
|
||||
Delete Today's Entry
|
||||
</button>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<!-- Data Query Section -->
|
||||
<div class="mb-4">
|
||||
<h2 class="text-xl font-semibold mb-2">Get time data for the:</h2>
|
||||
<div class="flex justify-center space-x-4">
|
||||
<button @click="fetchData('week')" class="bg-blue-500 text-white py-2 px-4 rounded-lg">Week</button>
|
||||
<button @click="fetchData('payperiod')" class="bg-blue-500 text-white py-2 px-4 rounded-lg">Payperiod</button>
|
||||
<button @click="fetchData('month')" class="bg-blue-500 text-white py-2 px-4 rounded-lg">Month</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<!-- Display Total Days and Hours Worked -->
|
||||
<div x-show="tableData.length > 0" class="mt-4">
|
||||
<h2 class="text-3xl font-bold mb-6">
|
||||
<span x-text="daysWorked"></span> Days,
|
||||
<span x-text="totalHours"></span> Hours
|
||||
</h2>
|
||||
</div>
|
||||
|
||||
<!-- Data Table (hidden unless data is present) -->
|
||||
<div x-show="tableData.length > 0" class="mt-6">
|
||||
<h2 class="text-xl font-semibold mb-4">Hours Worked</h2>
|
||||
<table class="min-w-full bg-white text-left">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="border px-4 py-2">Date</th>
|
||||
<th class="border px-4 py-2">Clock In</th>
|
||||
<th class="border px-4 py-2">Clock Out</th>
|
||||
<th class="border px-4 py-2">Total Time</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<template x-for="row in tableData" :key="row.dateRange">
|
||||
<tr>
|
||||
<td class="border px-4 py-2" x-text="row.dateRange"></td>
|
||||
<td class="border px-4 py-2" x-text="row.clockInTime"></td>
|
||||
<td class="border px-4 py-2" x-text="row.clockOutTime"></td>
|
||||
<td class="border px-4 py-2" x-text="row.totalTime"></td>
|
||||
</tr>
|
||||
</template>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<!-- Export Button -->
|
||||
<button @click="exportTable()" class="mt-4 bg-green-500 text-white py-2 px-4 rounded-lg">Export Data</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- Footer -->
|
||||
<footer>
|
||||
© Powered by Sean and Coffee
|
||||
</footer>
|
||||
<script>
|
||||
function appData() {
|
||||
return {
|
||||
isDarkMode: false,
|
||||
userName: null,
|
||||
userNameInput: '',
|
||||
isClockedIn: false,
|
||||
isClockedInToday: false,
|
||||
userStatus: null,
|
||||
tableData: [],
|
||||
totalHours: 0,
|
||||
daysWorked: 0,
|
||||
modalVisible: false,
|
||||
modalUsername: '',
|
||||
isProcessingClockInOut: false,
|
||||
|
||||
// Initialize the app
|
||||
init() {
|
||||
this.isDarkMode = localStorage.getItem('darkMode') === 'true';
|
||||
if (this.isDarkMode) {
|
||||
document.body.classList.add('dark-mode');
|
||||
} else {
|
||||
document.body.classList.remove('dark-mode');
|
||||
}
|
||||
this.checkUser();
|
||||
},
|
||||
|
||||
// Capitalize the first letter of the username
|
||||
capitalizeFirstLetter(name) {
|
||||
return name.charAt(0).toUpperCase() + name.slice(1);
|
||||
},
|
||||
|
||||
// Show status message
|
||||
showStatusMessage(message, type) {
|
||||
const statusMessageElement = document.getElementById('statusMessage');
|
||||
statusMessageElement.textContent = message;
|
||||
statusMessageElement.className = 'status-message hidden';
|
||||
|
||||
if (type === 'success') {
|
||||
statusMessageElement.classList.add('status-success');
|
||||
} else {
|
||||
statusMessageElement.classList.add('status-error');
|
||||
}
|
||||
|
||||
statusMessageElement.classList.remove('hidden');
|
||||
statusMessageElement.classList.add('show');
|
||||
|
||||
if (this.statusMessageTimeout) {
|
||||
clearTimeout(this.statusMessageTimeout);
|
||||
}
|
||||
|
||||
this.statusMessageTimeout = setTimeout(() => {
|
||||
statusMessageElement.classList.remove('show');
|
||||
statusMessageElement.classList.add('hidden');
|
||||
}, 3000);
|
||||
},
|
||||
|
||||
// Toggle between dark mode and light mode
|
||||
toggleDarkMode() {
|
||||
this.isDarkMode = !this.isDarkMode;
|
||||
localStorage.setItem('darkMode', this.isDarkMode);
|
||||
document.body.classList.toggle('dark-mode', this.isDarkMode);
|
||||
},
|
||||
|
||||
// Save user to cookie
|
||||
saveUser() {
|
||||
if (this.isSavingUser) return;
|
||||
this.isSavingUser = true;
|
||||
|
||||
const username = this.userNameInput.toLowerCase();
|
||||
fetch(`/user/status/${username}`)
|
||||
.then(response => {
|
||||
if (response.ok) {
|
||||
// If the user exists, set the cookie and check user status
|
||||
this.setCookie('userName', username, 7);
|
||||
this.userName = username;
|
||||
this.checkUserStatus();
|
||||
} else if (response.status === 404) {
|
||||
// If the user is not found, show the modal to create a new user
|
||||
this.showUserPromptModal(username);
|
||||
} else {
|
||||
throw new Error('Unexpected error checking user status');
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
this.showStatusMessage(`Error: ${error.message || 'Unknown error'}`, 'error');
|
||||
})
|
||||
.finally(() => {
|
||||
this.isSavingUser = false;
|
||||
});
|
||||
},
|
||||
|
||||
// Show user prompt modal
|
||||
showUserPromptModal(username) {
|
||||
this.modalUsername = username;
|
||||
document.getElementById('usernameSpan').textContent = username;
|
||||
this.toggleModal(true); // Ensure this shows the modal
|
||||
},
|
||||
|
||||
toggleModal(visible) {
|
||||
const modal = document.getElementById('userPromptModal');
|
||||
if (visible) {
|
||||
modal.classList.remove('hidden');
|
||||
modal.classList.add('visible');
|
||||
} else {
|
||||
modal.classList.remove('visible');
|
||||
// Delay the hidden state only after the CSS transition finishes
|
||||
setTimeout(() => {
|
||||
modal.classList.add('hidden');
|
||||
}, 200); // Matching the transition duration (0.2s)
|
||||
}
|
||||
},
|
||||
|
||||
// Create a new user
|
||||
createUser() {
|
||||
if (this.isCreatingUser) return;
|
||||
this.isCreatingUser = true;
|
||||
|
||||
const username = this.modalUsername;
|
||||
fetch('/user/create', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({ name: username })
|
||||
})
|
||||
.then(response => {
|
||||
if (response.ok) {
|
||||
this.setCookie('userName', username, 7);
|
||||
this.userName = username;
|
||||
this.checkUserStatus();
|
||||
this.showStatusMessage(`User "${username}" has been created successfully!`, 'success');
|
||||
} else {
|
||||
return response.json().then(err => { throw new Error(err.detail); });
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
this.showStatusMessage(`Error creating user: ${error.message}`, 'error');
|
||||
})
|
||||
.finally(() => {
|
||||
this.toggleModal(false);
|
||||
this.isCreatingUser = false;
|
||||
});
|
||||
},
|
||||
|
||||
// Clear user data
|
||||
clearUser() {
|
||||
this.deleteCookie('userName');
|
||||
this.userName = null;
|
||||
this.userStatus = null;
|
||||
},
|
||||
|
||||
// Check if user exists and get status
|
||||
checkUser() {
|
||||
this.userName = this.getCookie('userName');
|
||||
if (this.userName) {
|
||||
this.checkUserStatus();
|
||||
}
|
||||
},
|
||||
|
||||
// Get the user's current status
|
||||
checkUserStatus() {
|
||||
const user = this.userName;
|
||||
fetch(`/user/status/${user}`)
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
this.userStatus = data;
|
||||
this.isClockedIn = this.userStatus === "in";
|
||||
this.checkIfClockedInToday();
|
||||
})
|
||||
.catch(() => {
|
||||
this.userStatus = "unknown";
|
||||
this.isClockedIn = false;
|
||||
this.isClockedInToday = false;
|
||||
});
|
||||
},
|
||||
|
||||
// Clock in or out the user
|
||||
clockInOut() {
|
||||
if (this.isProcessingClockInOut) return;
|
||||
this.isProcessingClockInOut = true;
|
||||
|
||||
const user = this.userName;
|
||||
if (this.isClockedIn) {
|
||||
fetch(`/time/${user}/out`, { method: 'POST' })
|
||||
.then((response) => {
|
||||
if (!response.ok) throw new Error('Error clocking out.');
|
||||
this.isClockedIn = false;
|
||||
this.userStatus = "out";
|
||||
this.isClockedInToday = true;
|
||||
this.showStatusMessage(`${user} has been clocked out successfully!`, 'success');
|
||||
})
|
||||
.catch(() => this.showStatusMessage(`Error clocking ${user} out.`, 'error'))
|
||||
.finally(() => this.isProcessingClockInOut = false);
|
||||
} else {
|
||||
fetch(`/time/${user}/in`, { method: 'POST' })
|
||||
.then((response) => {
|
||||
if (!response.ok) {
|
||||
return response.json().then(err => { throw new Error(err.message); });
|
||||
}
|
||||
this.isClockedIn = true;
|
||||
this.userStatus = "in";
|
||||
this.isClockedInToday = true;
|
||||
this.showStatusMessage(`${user} has been clocked in successfully!`, 'success');
|
||||
})
|
||||
.catch((error) => this.showStatusMessage(error.message, 'error'))
|
||||
.finally(() => this.isProcessingClockInOut = false);
|
||||
}
|
||||
},
|
||||
|
||||
// Delete today's clock in and clock out time
|
||||
deleteTodayEntry() {
|
||||
const user = this.userName;
|
||||
fetch(`/time/${user}/today`, { method: 'DELETE' })
|
||||
.then((response) => {
|
||||
if (!response.ok) throw new Error('Error deleting today\'s entry.');
|
||||
this.isClockedInToday = false;
|
||||
this.showStatusMessage(`Today's entry has been deleted successfully!`, 'success');
|
||||
})
|
||||
.catch(() => this.showStatusMessage(`Error deleting today's entry.`, 'error'));
|
||||
},
|
||||
|
||||
// Check if the user has clocked in today
|
||||
checkIfClockedInToday() {
|
||||
if (!this.userName) return;
|
||||
const user = this.userName;
|
||||
fetch(`/time/${user}/is_clocked_in_today`)
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
this.isClockedInToday = data.clocked_in_today;
|
||||
})
|
||||
.catch(() => {
|
||||
this.isClockedInToday = false;
|
||||
});
|
||||
},
|
||||
|
||||
// Fetch data for the specified period
|
||||
fetchData(type) {
|
||||
const user = this.userName;
|
||||
fetch(`/time/${user}/recall/${type}`)
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
console.log('Data received from backend:', data); // Add this line
|
||||
this.totalHours = data.total_hours ? data.total_hours.toFixed(2) : '0.00';
|
||||
this.daysWorked = data.days_worked || 0;
|
||||
this.tableData = data.entries.map(entry => {
|
||||
return {
|
||||
dateRange: entry.date || 'N/A',
|
||||
clockInTime: entry.clock_in ? this.formatTimeUTC(entry.clock_in) : 'N/A',
|
||||
clockOutTime: entry.clock_out ? this.formatTimeUTC(entry.clock_out) : 'N/A',
|
||||
totalTime: this.formatTotalTime(entry.total_time) || 'N/A'
|
||||
};
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
this.showStatusMessage('Error fetching data: ' + error.message, 'error');
|
||||
});
|
||||
},
|
||||
|
||||
// Format UTC time to the local timezone
|
||||
formatTimeUTC(datetimeString) {
|
||||
if (!datetimeString) return 'N/A';
|
||||
const date = new Date(datetimeString);
|
||||
const options = {
|
||||
hour: 'numeric',
|
||||
minute: 'numeric',
|
||||
hour12: true
|
||||
};
|
||||
return date.toLocaleTimeString([], options);
|
||||
},
|
||||
|
||||
// Format total time to X Hours X Min
|
||||
formatTotalTime(totalTimeString) {
|
||||
if (!totalTimeString || totalTimeString === 'N/A') return 'N/A';
|
||||
const timeParts = totalTimeString.match(/(\d+):(\d+):(\d+)(\.\d+)?/);
|
||||
if (timeParts) {
|
||||
const hours = parseInt(timeParts[1], 10);
|
||||
const minutes = parseInt(timeParts[2], 10);
|
||||
const seconds = parseFloat(timeParts[3] + (timeParts[4] || ''));
|
||||
let totalSeconds = hours * 3600 + minutes * 60 + seconds;
|
||||
if (totalSeconds >= 3600) {
|
||||
const displayHours = Math.floor(totalSeconds / 3600);
|
||||
totalSeconds %= 3600;
|
||||
const displayMinutes = Math.floor(totalSeconds / 60);
|
||||
return `${displayHours} Hours ${displayMinutes} Min`;
|
||||
} else if (totalSeconds >= 60) {
|
||||
const displayMinutes = Math.floor(totalSeconds / 60);
|
||||
return `${displayMinutes} Min`;
|
||||
} else {
|
||||
return `${Math.round(totalSeconds)} Sec`;
|
||||
}
|
||||
}
|
||||
return totalTimeString;
|
||||
},
|
||||
|
||||
// Export table data to CSV
|
||||
exportTable() {
|
||||
let csvContent = "data:text/csv;charset=utf-8,";
|
||||
csvContent += "Date Range,Clock In,Clock Out,Total Time\n";
|
||||
this.tableData.forEach(row => {
|
||||
const dataString = `${row.dateRange},${row.clockInTime},${row.clockOutTime},${row.totalTime}`;
|
||||
csvContent += dataString + "\n";
|
||||
});
|
||||
const encodedUri = encodeURI(csvContent);
|
||||
const link = document.createElement("a");
|
||||
link.setAttribute("href", encodedUri);
|
||||
link.setAttribute("download", "time_data.csv");
|
||||
document.body.appendChild(link);
|
||||
link.click();
|
||||
document.body.removeChild(link);
|
||||
},
|
||||
|
||||
// Utility functions for cookies
|
||||
setCookie(name, value, days) {
|
||||
const d = new Date();
|
||||
d.setTime(d.getTime() + (days * 24 * 60 * 60 * 1000));
|
||||
document.cookie = `${name}=${value};expires=${d.toUTCString()};path=/`;
|
||||
},
|
||||
getCookie(name) {
|
||||
const value = `; ${document.cookie}`;
|
||||
const parts = value.split(`; ${name}=`);
|
||||
if (parts.length === 2) return parts.pop().split(';').shift();
|
||||
return null;
|
||||
},
|
||||
deleteCookie(name) {
|
||||
document.cookie = `${name}=;expires=Thu, 01 Jan 1970 00:00:00 GMT;path=/`;
|
||||
}
|
||||
};
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
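formatTotalTime() in the page above expects each entry's total_time to arrive as an "HH:MM:SS[.ffffff]" string (the usual string form of a serialized duration; an assumption about the backend, not verified here) and renders it as hours and minutes. A few sample calls:

// Expected outputs of the formatTotalTime() helper defined above.
formatTotalTime('01:30:00');        // -> '1 Hours 30 Min'
formatTotalTime('00:07:12.345678'); // -> '7 Min'
formatTotalTime('00:00:42');        // -> '42 Sec'
formatTotalTime('N/A');             // -> 'N/A'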
1  frontend/node_modules/.bin/autoprefixer  generated  vendored  Normal file
@@ -0,0 +1 @@
../autoprefixer/bin/autoprefixer
1  frontend/node_modules/.bin/browserslist  generated  vendored  Normal file
@@ -0,0 +1 @@
../browserslist/cli.js
1  frontend/node_modules/.bin/cssesc  generated  vendored  Normal file
@@ -0,0 +1 @@
../cssesc/bin/cssesc
1  frontend/node_modules/.bin/glob  generated  vendored  Normal file
@@ -0,0 +1 @@
../glob/dist/esm/bin.mjs
1  frontend/node_modules/.bin/jiti  generated  vendored  Normal file
@@ -0,0 +1 @@
../jiti/bin/jiti.js
1  frontend/node_modules/.bin/nanoid  generated  vendored  Normal file
@@ -0,0 +1 @@
../nanoid/bin/nanoid.cjs
1  frontend/node_modules/.bin/node-which  generated  vendored  Normal file
@@ -0,0 +1 @@
../which/bin/node-which
1  frontend/node_modules/.bin/resolve  generated  vendored  Normal file
@@ -0,0 +1 @@
../resolve/bin/resolve
1  frontend/node_modules/.bin/sucrase  generated  vendored  Normal file
@@ -0,0 +1 @@
../sucrase/bin/sucrase
1  frontend/node_modules/.bin/sucrase-node  generated  vendored  Normal file
@@ -0,0 +1 @@
../sucrase/bin/sucrase-node
1  frontend/node_modules/.bin/tailwind  generated  vendored  Normal file
@@ -0,0 +1 @@
../tailwindcss/lib/cli.js
1  frontend/node_modules/.bin/tailwindcss  generated  vendored  Normal file
@@ -0,0 +1 @@
../tailwindcss/lib/cli.js
1  frontend/node_modules/.bin/update-browserslist-db  generated  vendored  Normal file
@@ -0,0 +1 @@
../update-browserslist-db/cli.js
1  frontend/node_modules/.bin/yaml  generated  vendored  Normal file
@@ -0,0 +1 @@
../yaml/bin.mjs
1530  frontend/node_modules/.package-lock.json  generated  vendored  Normal file
File diff suppressed because it is too large.
128  frontend/node_modules/@alloc/quick-lru/index.d.ts  generated  vendored  Normal file
@@ -0,0 +1,128 @@
declare namespace QuickLRU {
|
||||
interface Options<KeyType, ValueType> {
|
||||
/**
|
||||
The maximum number of milliseconds an item should remain in the cache.
|
||||
|
||||
@default Infinity
|
||||
|
||||
By default, `maxAge` will be `Infinity`, which means that items will never expire.
|
||||
Lazy expiration upon the next write or read call.
|
||||
|
||||
Individual expiration of an item can be specified by the `set(key, value, maxAge)` method.
|
||||
*/
|
||||
readonly maxAge?: number;
|
||||
|
||||
/**
|
||||
The maximum number of items before evicting the least recently used items.
|
||||
*/
|
||||
readonly maxSize: number;
|
||||
|
||||
/**
|
||||
Called right before an item is evicted from the cache.
|
||||
|
||||
Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`).
|
||||
*/
|
||||
onEviction?: (key: KeyType, value: ValueType) => void;
|
||||
}
|
||||
}
|
||||
|
||||
declare class QuickLRU<KeyType, ValueType>
|
||||
implements Iterable<[KeyType, ValueType]> {
|
||||
/**
|
||||
The stored item count.
|
||||
*/
|
||||
readonly size: number;
|
||||
|
||||
/**
|
||||
Simple ["Least Recently Used" (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29).
|
||||
|
||||
The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop.
|
||||
|
||||
@example
|
||||
```
|
||||
import QuickLRU = require('quick-lru');
|
||||
|
||||
const lru = new QuickLRU({maxSize: 1000});
|
||||
|
||||
lru.set('🦄', '🌈');
|
||||
|
||||
lru.has('🦄');
|
||||
//=> true
|
||||
|
||||
lru.get('🦄');
|
||||
//=> '🌈'
|
||||
```
|
||||
*/
|
||||
constructor(options: QuickLRU.Options<KeyType, ValueType>);
|
||||
|
||||
[Symbol.iterator](): IterableIterator<[KeyType, ValueType]>;
|
||||
|
||||
/**
|
||||
Set an item. Returns the instance.
|
||||
|
||||
Individual expiration of an item can be specified with the `maxAge` option. If not specified, the global `maxAge` value will be used in case it is specified in the constructor, otherwise the item will never expire.
|
||||
|
||||
@returns The list instance.
|
||||
*/
|
||||
set(key: KeyType, value: ValueType, options?: {maxAge?: number}): this;
|
||||
|
||||
/**
|
||||
Get an item.
|
||||
|
||||
@returns The stored item or `undefined`.
|
||||
*/
|
||||
get(key: KeyType): ValueType | undefined;
|
||||
|
||||
/**
|
||||
Check if an item exists.
|
||||
*/
|
||||
has(key: KeyType): boolean;
|
||||
|
||||
/**
|
||||
Get an item without marking it as recently used.
|
||||
|
||||
@returns The stored item or `undefined`.
|
||||
*/
|
||||
peek(key: KeyType): ValueType | undefined;
|
||||
|
||||
/**
|
||||
Delete an item.
|
||||
|
||||
@returns `true` if the item is removed or `false` if the item doesn't exist.
|
||||
*/
|
||||
delete(key: KeyType): boolean;
|
||||
|
||||
/**
|
||||
Delete all items.
|
||||
*/
|
||||
clear(): void;
|
||||
|
||||
/**
|
||||
Update the `maxSize` in-place, discarding items as necessary. Insertion order is mostly preserved, though this is not a strong guarantee.
|
||||
|
||||
Useful for on-the-fly tuning of cache sizes in live systems.
|
||||
*/
|
||||
resize(maxSize: number): void;
|
||||
|
||||
/**
|
||||
Iterable for all the keys.
|
||||
*/
|
||||
keys(): IterableIterator<KeyType>;
|
||||
|
||||
/**
|
||||
Iterable for all the values.
|
||||
*/
|
||||
values(): IterableIterator<ValueType>;
|
||||
|
||||
/**
|
||||
Iterable for all entries, starting with the oldest (ascending in recency).
|
||||
*/
|
||||
entriesAscending(): IterableIterator<[KeyType, ValueType]>;
|
||||
|
||||
/**
|
||||
Iterable for all entries, starting with the newest (descending in recency).
|
||||
*/
|
||||
entriesDescending(): IterableIterator<[KeyType, ValueType]>;
|
||||
}
|
||||
|
||||
export = QuickLRU;
263  frontend/node_modules/@alloc/quick-lru/index.js  generated  vendored  Normal file
@@ -0,0 +1,263 @@
'use strict';
|
||||
|
||||
class QuickLRU {
|
||||
constructor(options = {}) {
|
||||
if (!(options.maxSize && options.maxSize > 0)) {
|
||||
throw new TypeError('`maxSize` must be a number greater than 0');
|
||||
}
|
||||
|
||||
if (typeof options.maxAge === 'number' && options.maxAge === 0) {
|
||||
throw new TypeError('`maxAge` must be a number greater than 0');
|
||||
}
|
||||
|
||||
this.maxSize = options.maxSize;
|
||||
this.maxAge = options.maxAge || Infinity;
|
||||
this.onEviction = options.onEviction;
|
||||
this.cache = new Map();
|
||||
this.oldCache = new Map();
|
||||
this._size = 0;
|
||||
}
|
||||
|
||||
_emitEvictions(cache) {
|
||||
if (typeof this.onEviction !== 'function') {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const [key, item] of cache) {
|
||||
this.onEviction(key, item.value);
|
||||
}
|
||||
}
|
||||
|
||||
_deleteIfExpired(key, item) {
|
||||
if (typeof item.expiry === 'number' && item.expiry <= Date.now()) {
|
||||
if (typeof this.onEviction === 'function') {
|
||||
this.onEviction(key, item.value);
|
||||
}
|
||||
|
||||
return this.delete(key);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
_getOrDeleteIfExpired(key, item) {
|
||||
const deleted = this._deleteIfExpired(key, item);
|
||||
if (deleted === false) {
|
||||
return item.value;
|
||||
}
|
||||
}
|
||||
|
||||
_getItemValue(key, item) {
|
||||
return item.expiry ? this._getOrDeleteIfExpired(key, item) : item.value;
|
||||
}
|
||||
|
||||
_peek(key, cache) {
|
||||
const item = cache.get(key);
|
||||
|
||||
return this._getItemValue(key, item);
|
||||
}
|
||||
|
||||
_set(key, value) {
|
||||
this.cache.set(key, value);
|
||||
this._size++;
|
||||
|
||||
if (this._size >= this.maxSize) {
|
||||
this._size = 0;
|
||||
this._emitEvictions(this.oldCache);
|
||||
this.oldCache = this.cache;
|
||||
this.cache = new Map();
|
||||
}
|
||||
}
|
||||
|
||||
_moveToRecent(key, item) {
|
||||
this.oldCache.delete(key);
|
||||
this._set(key, item);
|
||||
}
|
||||
|
||||
* _entriesAscending() {
|
||||
for (const item of this.oldCache) {
|
||||
const [key, value] = item;
|
||||
if (!this.cache.has(key)) {
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield item;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const item of this.cache) {
|
||||
const [key, value] = item;
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield item;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get(key) {
|
||||
if (this.cache.has(key)) {
|
||||
const item = this.cache.get(key);
|
||||
|
||||
return this._getItemValue(key, item);
|
||||
}
|
||||
|
||||
if (this.oldCache.has(key)) {
|
||||
const item = this.oldCache.get(key);
|
||||
if (this._deleteIfExpired(key, item) === false) {
|
||||
this._moveToRecent(key, item);
|
||||
return item.value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
set(key, value, {maxAge = this.maxAge === Infinity ? undefined : Date.now() + this.maxAge} = {}) {
|
||||
if (this.cache.has(key)) {
|
||||
this.cache.set(key, {
|
||||
value,
|
||||
maxAge
|
||||
});
|
||||
} else {
|
||||
this._set(key, {value, expiry: maxAge});
|
||||
}
|
||||
}
|
||||
|
||||
has(key) {
|
||||
if (this.cache.has(key)) {
|
||||
return !this._deleteIfExpired(key, this.cache.get(key));
|
||||
}
|
||||
|
||||
if (this.oldCache.has(key)) {
|
||||
return !this._deleteIfExpired(key, this.oldCache.get(key));
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
peek(key) {
|
||||
if (this.cache.has(key)) {
|
||||
return this._peek(key, this.cache);
|
||||
}
|
||||
|
||||
if (this.oldCache.has(key)) {
|
||||
return this._peek(key, this.oldCache);
|
||||
}
|
||||
}
|
||||
|
||||
delete(key) {
|
||||
const deleted = this.cache.delete(key);
|
||||
if (deleted) {
|
||||
this._size--;
|
||||
}
|
||||
|
||||
return this.oldCache.delete(key) || deleted;
|
||||
}
|
||||
|
||||
clear() {
|
||||
this.cache.clear();
|
||||
this.oldCache.clear();
|
||||
this._size = 0;
|
||||
}
|
||||
|
||||
resize(newSize) {
|
||||
if (!(newSize && newSize > 0)) {
|
||||
throw new TypeError('`maxSize` must be a number greater than 0');
|
||||
}
|
||||
|
||||
const items = [...this._entriesAscending()];
|
||||
const removeCount = items.length - newSize;
|
||||
if (removeCount < 0) {
|
||||
this.cache = new Map(items);
|
||||
this.oldCache = new Map();
|
||||
this._size = items.length;
|
||||
} else {
|
||||
if (removeCount > 0) {
|
||||
this._emitEvictions(items.slice(0, removeCount));
|
||||
}
|
||||
|
||||
this.oldCache = new Map(items.slice(removeCount));
|
||||
this.cache = new Map();
|
||||
this._size = 0;
|
||||
}
|
||||
|
||||
this.maxSize = newSize;
|
||||
}
|
||||
|
||||
* keys() {
|
||||
for (const [key] of this) {
|
||||
yield key;
|
||||
}
|
||||
}
|
||||
|
||||
* values() {
|
||||
for (const [, value] of this) {
|
||||
yield value;
|
||||
}
|
||||
}
|
||||
|
||||
* [Symbol.iterator]() {
|
||||
for (const item of this.cache) {
|
||||
const [key, value] = item;
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield [key, value.value];
|
||||
}
|
||||
}
|
||||
|
||||
for (const item of this.oldCache) {
|
||||
const [key, value] = item;
|
||||
if (!this.cache.has(key)) {
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield [key, value.value];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
* entriesDescending() {
|
||||
let items = [...this.cache];
|
||||
for (let i = items.length - 1; i >= 0; --i) {
|
||||
const item = items[i];
|
||||
const [key, value] = item;
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield [key, value.value];
|
||||
}
|
||||
}
|
||||
|
||||
items = [...this.oldCache];
|
||||
for (let i = items.length - 1; i >= 0; --i) {
|
||||
const item = items[i];
|
||||
const [key, value] = item;
|
||||
if (!this.cache.has(key)) {
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield [key, value.value];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
* entriesAscending() {
|
||||
for (const [key, value] of this._entriesAscending()) {
|
||||
yield [key, value.value];
|
||||
}
|
||||
}
|
||||
|
||||
get size() {
|
||||
if (!this._size) {
|
||||
return this.oldCache.size;
|
||||
}
|
||||
|
||||
let oldCacheSize = 0;
|
||||
for (const key of this.oldCache.keys()) {
|
||||
if (!this.cache.has(key)) {
|
||||
oldCacheSize++;
|
||||
}
|
||||
}
|
||||
|
||||
return Math.min(this._size + oldCacheSize, this.maxSize);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = QuickLRU;
|
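The vendored class above uses a hashlru-style two-Map strategy: writes fill `cache`, and once `maxSize` writes accumulate, `cache` is demoted wholesale to `oldCache` and a fresh `Map` is started. A minimal usage sketch (not part of the vendored file, assuming the package resolves as `@alloc/quick-lru`):

```js
const QuickLRU = require('@alloc/quick-lru'); // assumed resolvable from this vendored package

const evicted = [];
const lru = new QuickLRU({
  maxSize: 2,
  onEviction: (key, value) => evicted.push([key, value])
});

lru.set('a', 1);
lru.set('b', 2); // _size hits maxSize: cache is demoted to oldCache, a fresh Map starts
lru.set('c', 3);

console.log(lru.get('a'));           // 1 — found in oldCache and promoted back into cache
console.log(evicted);                // [['b', 2]] — dropped when the previous oldCache was discarded
console.log(lru.has('c'), lru.size); // true, 2
```

As the code shows, eviction happens in bulk when `cache` rotates into `oldCache`, rather than one entry at a time; that is the trade-off that keeps writes O(1).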
9
frontend/node_modules/@alloc/quick-lru/license
generated
vendored
Normal file
@@ -0,0 +1,9 @@
MIT License

Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
43
frontend/node_modules/@alloc/quick-lru/package.json
generated
vendored
Normal file
@@ -0,0 +1,43 @@
{
  "name": "@alloc/quick-lru",
  "version": "5.2.0",
  "description": "Simple “Least Recently Used” (LRU) cache",
  "license": "MIT",
  "repository": "sindresorhus/quick-lru",
  "funding": "https://github.com/sponsors/sindresorhus",
  "author": {
    "name": "Sindre Sorhus",
    "email": "sindresorhus@gmail.com",
    "url": "https://sindresorhus.com"
  },
  "engines": {
    "node": ">=10"
  },
  "scripts": {
    "test": "xo && nyc ava && tsd"
  },
  "files": [
    "index.js",
    "index.d.ts"
  ],
  "keywords": [
    "lru",
    "quick",
    "cache",
    "caching",
    "least",
    "recently",
    "used",
    "fast",
    "map",
    "hash",
    "buffer"
  ],
  "devDependencies": {
    "ava": "^2.0.0",
    "coveralls": "^3.0.3",
    "nyc": "^15.0.0",
    "tsd": "^0.11.0",
    "xo": "^0.26.0"
  }
}
139
frontend/node_modules/@alloc/quick-lru/readme.md
generated
vendored
Normal file
@@ -0,0 +1,139 @@
# quick-lru [Build Status](https://travis-ci.org/sindresorhus/quick-lru) [Coverage Status](https://coveralls.io/github/sindresorhus/quick-lru?branch=master)

> Simple [“Least Recently Used” (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29)

Useful when you need to cache something and limit memory usage.

Inspired by the [`hashlru` algorithm](https://github.com/dominictarr/hashlru#algorithm), but instead uses [`Map`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Map) to support keys of any type, not just strings, and values can be `undefined`.

## Install

```
$ npm install quick-lru
```

## Usage

```js
const QuickLRU = require('quick-lru');

const lru = new QuickLRU({maxSize: 1000});

lru.set('🦄', '🌈');

lru.has('🦄');
//=> true

lru.get('🦄');
//=> '🌈'
```

## API

### new QuickLRU(options?)

Returns a new instance.

### options

Type: `object`

#### maxSize

*Required*\
Type: `number`

The maximum number of items before evicting the least recently used items.

#### maxAge

Type: `number`\
Default: `Infinity`

The maximum number of milliseconds an item should remain in the cache.
By default, `maxAge` is `Infinity`, which means that items never expire.

Lazy expiration happens upon the next `write` or `read` call.

Individual expiration of an item can be specified by the `set(key, value, options)` method.

#### onEviction

*Optional*\
Type: `(key, value) => void`

Called right before an item is evicted from the cache.

Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`).

### Instance

The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop.

Both `key` and `value` can be of any type.

#### .set(key, value, options?)

Set an item. Returns the instance.

Individual expiration of an item can be specified with the `maxAge` option. If not specified, the global `maxAge` value is used if it was given to the constructor; otherwise the item never expires.

#### .get(key)

Get an item.

#### .has(key)

Check if an item exists.

#### .peek(key)

Get an item without marking it as recently used.

#### .delete(key)

Delete an item.

Returns `true` if the item is removed or `false` if the item doesn't exist.

#### .clear()

Delete all items.

#### .resize(maxSize)

Update the `maxSize`, discarding items as necessary. Insertion order is mostly preserved, though this is not a strong guarantee.

Useful for on-the-fly tuning of cache sizes in live systems.

#### .keys()

Iterable for all the keys.

#### .values()

Iterable for all the values.

#### .entriesAscending()

Iterable for all entries, starting with the oldest (ascending in recency).

#### .entriesDescending()

Iterable for all entries, starting with the newest (descending in recency).

#### .size

The stored item count.

---

<div align="center">
  <b>
    <a href="https://tidelift.com/subscription/pkg/npm-quick-lru?utm_source=npm-quick-lru&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
  </b>
  <br>
  <sub>
    Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
  </sub>
</div>
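A short sketch of the `maxAge` behaviour described in the API section above (not part of the vendored readme; it assumes the vendored `@alloc/quick-lru` package resolves):

```js
const QuickLRU = require('@alloc/quick-lru');

const lru = new QuickLRU({
  maxSize: 100,
  maxAge: 1000, // entries expire one second after being written
  onEviction: key => console.log('expired or evicted:', key)
});

lru.set('token', 'xyz');
console.log(lru.get('token')); // 'xyz' — still within its window

setTimeout(() => {
  // Expiration is lazy: the entry is only dropped when it is next touched.
  console.log(lru.has('token')); // false, and onEviction fires at this point
}, 1500);
```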
14
frontend/node_modules/@isaacs/cliui/LICENSE.txt
generated
vendored
Normal file
@@ -0,0 +1,14 @@
Copyright (c) 2015, Contributors

Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted, provided
that the above copyright notice and this permission notice
appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE
LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES
OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
143
frontend/node_modules/@isaacs/cliui/README.md
generated
vendored
Normal file
@@ -0,0 +1,143 @@
# @isaacs/cliui

Temporary fork of [cliui](http://npm.im/cliui).

[npm version](https://www.npmjs.com/package/cliui)
[Conventional Commits](https://conventionalcommits.org)

Easily create complex multi-column command-line interfaces.

## Example

```js
const ui = require('cliui')()

ui.div('Usage: $0 [command] [options]')

ui.div({
  text: 'Options:',
  padding: [2, 0, 1, 0]
})

ui.div(
  {
    text: "-f, --file",
    width: 20,
    padding: [0, 4, 0, 4]
  },
  {
    text: "the file to load." +
      chalk.green("(if this description is long it wraps).")
    ,
    width: 20
  },
  {
    text: chalk.red("[required]"),
    align: 'right'
  }
)

console.log(ui.toString())
```

## Deno/ESM Support

As of `v7`, `cliui` supports [Deno](https://github.com/denoland/deno) and
[ESM](https://nodejs.org/api/esm.html#esm_ecmascript_modules):

```typescript
import cliui from "https://deno.land/x/cliui/deno.ts";

const ui = cliui({})

ui.div('Usage: $0 [command] [options]')

ui.div({
  text: 'Options:',
  padding: [2, 0, 1, 0]
})

ui.div({
  text: "-f, --file",
  width: 20,
  padding: [0, 4, 0, 4]
})

console.log(ui.toString())
```

<img width="500" src="screenshot.png">

## Layout DSL

cliui exposes a simple layout DSL:

If you create a single `ui.div`, passing a string rather than an
object:

* `\n`: characters will be interpreted as new rows.
* `\t`: characters will be interpreted as new columns.
* `\s`: characters will be interpreted as padding.

**as an example...**

```js
var ui = require('./')({
  width: 60
})

ui.div(
  'Usage: node ./bin/foo.js\n' +
  '  <regex>\t  provide a regex\n' +
  '  <glob>\t  provide a glob\t [required]'
)

console.log(ui.toString())
```

**will output:**

```shell
Usage: node ./bin/foo.js
  <regex>  provide a regex
  <glob>   provide a glob          [required]
```

## Methods

```js
cliui = require('cliui')
```

### cliui({width: integer})

Specify the maximum width of the UI being generated.
If no width is provided, cliui will try to get the current window's width and use it, and if that doesn't work, width will be set to `80`.

### cliui({wrap: boolean})

Enable or disable the wrapping of text in a column.

### cliui.div(column, column, column)

Create a row with any number of columns. A column
can either be a string or an object with the following
options:

* **text:** some text to place in the column.
* **width:** the width of a column.
* **align:** alignment, `right` or `center`.
* **padding:** `[top, right, bottom, left]`.
* **border:** should a border be placed around the div?

### cliui.span(column, column, column)

Similar to `div`, except the next row will be appended without
a new line being created.

### cliui.resetOutput()

Resets the UI elements of the current cliui instance, maintaining the values
set for `width` and `wrap`.
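The `div`/`span` distinction described under Methods is easiest to see side by side. A minimal sketch (not part of the vendored readme; it loads the vendored `@isaacs/cliui` entry point rather than upstream `cliui`):

```js
const cliui = require('@isaacs/cliui');
const ui = cliui({ width: 40 });

// span(): the following row is appended to the same output line.
ui.span({ text: 'Usage:', width: 10 });
ui.div('my-tool <input> [options]');

// div(): starts a new row; padding is [top, right, bottom, left].
ui.div({ text: 'Reads <input> and prints a report.', padding: [1, 0, 0, 2] });

console.log(ui.toString());
```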
317
frontend/node_modules/@isaacs/cliui/build/index.cjs
generated
vendored
Normal file
@@ -0,0 +1,317 @@
'use strict';
|
||||
|
||||
const align = {
|
||||
right: alignRight,
|
||||
center: alignCenter
|
||||
};
|
||||
const top = 0;
|
||||
const right = 1;
|
||||
const bottom = 2;
|
||||
const left = 3;
|
||||
class UI {
|
||||
constructor(opts) {
|
||||
var _a;
|
||||
this.width = opts.width;
|
||||
/* c8 ignore start */
|
||||
this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? _a : true;
|
||||
/* c8 ignore stop */
|
||||
this.rows = [];
|
||||
}
|
||||
span(...args) {
|
||||
const cols = this.div(...args);
|
||||
cols.span = true;
|
||||
}
|
||||
resetOutput() {
|
||||
this.rows = [];
|
||||
}
|
||||
div(...args) {
|
||||
if (args.length === 0) {
|
||||
this.div('');
|
||||
}
|
||||
if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') {
|
||||
return this.applyLayoutDSL(args[0]);
|
||||
}
|
||||
const cols = args.map(arg => {
|
||||
if (typeof arg === 'string') {
|
||||
return this.colFromString(arg);
|
||||
}
|
||||
return arg;
|
||||
});
|
||||
this.rows.push(cols);
|
||||
return cols;
|
||||
}
|
||||
shouldApplyLayoutDSL(...args) {
|
||||
return args.length === 1 && typeof args[0] === 'string' &&
|
||||
/[\t\n]/.test(args[0]);
|
||||
}
|
||||
applyLayoutDSL(str) {
|
||||
const rows = str.split('\n').map(row => row.split('\t'));
|
||||
let leftColumnWidth = 0;
|
||||
// simple heuristic for layout, make sure the
|
||||
// second column lines up along the left-hand.
|
||||
// don't allow the first column to take up more
|
||||
// than 50% of the screen.
|
||||
rows.forEach(columns => {
|
||||
if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) {
|
||||
leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0]));
|
||||
}
|
||||
});
|
||||
// generate a table:
|
||||
// replacing ' ' with padding calculations.
|
||||
// using the algorithmically generated width.
|
||||
rows.forEach(columns => {
|
||||
this.div(...columns.map((r, i) => {
|
||||
return {
|
||||
text: r.trim(),
|
||||
padding: this.measurePadding(r),
|
||||
width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined
|
||||
};
|
||||
}));
|
||||
});
|
||||
return this.rows[this.rows.length - 1];
|
||||
}
|
||||
colFromString(text) {
|
||||
return {
|
||||
text,
|
||||
padding: this.measurePadding(text)
|
||||
};
|
||||
}
|
||||
measurePadding(str) {
|
||||
// measure padding without ansi escape codes
|
||||
const noAnsi = mixin.stripAnsi(str);
|
||||
return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length];
|
||||
}
|
||||
toString() {
|
||||
const lines = [];
|
||||
this.rows.forEach(row => {
|
||||
this.rowToString(row, lines);
|
||||
});
|
||||
// don't display any lines with the
|
||||
// hidden flag set.
|
||||
return lines
|
||||
.filter(line => !line.hidden)
|
||||
.map(line => line.text)
|
||||
.join('\n');
|
||||
}
|
||||
rowToString(row, lines) {
|
||||
this.rasterize(row).forEach((rrow, r) => {
|
||||
let str = '';
|
||||
rrow.forEach((col, c) => {
|
||||
const { width } = row[c]; // the width with padding.
|
||||
const wrapWidth = this.negatePadding(row[c]); // the width without padding.
|
||||
let ts = col; // temporary string used during alignment/padding.
|
||||
if (wrapWidth > mixin.stringWidth(col)) {
|
||||
ts += ' '.repeat(wrapWidth - mixin.stringWidth(col));
|
||||
}
|
||||
// align the string within its column.
|
||||
if (row[c].align && row[c].align !== 'left' && this.wrap) {
|
||||
const fn = align[row[c].align];
|
||||
ts = fn(ts, wrapWidth);
|
||||
if (mixin.stringWidth(ts) < wrapWidth) {
|
||||
/* c8 ignore start */
|
||||
const w = width || 0;
|
||||
/* c8 ignore stop */
|
||||
ts += ' '.repeat(w - mixin.stringWidth(ts) - 1);
|
||||
}
|
||||
}
|
||||
// apply border and padding to string.
|
||||
const padding = row[c].padding || [0, 0, 0, 0];
|
||||
if (padding[left]) {
|
||||
str += ' '.repeat(padding[left]);
|
||||
}
|
||||
str += addBorder(row[c], ts, '| ');
|
||||
str += ts;
|
||||
str += addBorder(row[c], ts, ' |');
|
||||
if (padding[right]) {
|
||||
str += ' '.repeat(padding[right]);
|
||||
}
|
||||
// if prior row is span, try to render the
|
||||
// current row on the prior line.
|
||||
if (r === 0 && lines.length > 0) {
|
||||
str = this.renderInline(str, lines[lines.length - 1]);
|
||||
}
|
||||
});
|
||||
// remove trailing whitespace.
|
||||
lines.push({
|
||||
text: str.replace(/ +$/, ''),
|
||||
span: row.span
|
||||
});
|
||||
});
|
||||
return lines;
|
||||
}
|
||||
// if the full 'source' can render in
|
||||
// the target line, do so.
|
||||
renderInline(source, previousLine) {
|
||||
const match = source.match(/^ */);
|
||||
/* c8 ignore start */
|
||||
const leadingWhitespace = match ? match[0].length : 0;
|
||||
/* c8 ignore stop */
|
||||
const target = previousLine.text;
|
||||
const targetTextWidth = mixin.stringWidth(target.trimEnd());
|
||||
if (!previousLine.span) {
|
||||
return source;
|
||||
}
|
||||
// if we're not applying wrapping logic,
|
||||
// just always append to the span.
|
||||
if (!this.wrap) {
|
||||
previousLine.hidden = true;
|
||||
return target + source;
|
||||
}
|
||||
if (leadingWhitespace < targetTextWidth) {
|
||||
return source;
|
||||
}
|
||||
previousLine.hidden = true;
|
||||
return target.trimEnd() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimStart();
|
||||
}
|
||||
rasterize(row) {
|
||||
const rrows = [];
|
||||
const widths = this.columnWidths(row);
|
||||
let wrapped;
|
||||
// word wrap all columns, and create
|
||||
// a data-structure that is easy to rasterize.
|
||||
row.forEach((col, c) => {
|
||||
// leave room for left and right padding.
|
||||
col.width = widths[c];
|
||||
if (this.wrap) {
|
||||
wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n');
|
||||
}
|
||||
else {
|
||||
wrapped = col.text.split('\n');
|
||||
}
|
||||
if (col.border) {
|
||||
wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.');
|
||||
wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'");
|
||||
}
|
||||
// add top and bottom padding.
|
||||
if (col.padding) {
|
||||
wrapped.unshift(...new Array(col.padding[top] || 0).fill(''));
|
||||
wrapped.push(...new Array(col.padding[bottom] || 0).fill(''));
|
||||
}
|
||||
wrapped.forEach((str, r) => {
|
||||
if (!rrows[r]) {
|
||||
rrows.push([]);
|
||||
}
|
||||
const rrow = rrows[r];
|
||||
for (let i = 0; i < c; i++) {
|
||||
if (rrow[i] === undefined) {
|
||||
rrow.push('');
|
||||
}
|
||||
}
|
||||
rrow.push(str);
|
||||
});
|
||||
});
|
||||
return rrows;
|
||||
}
|
||||
negatePadding(col) {
|
||||
/* c8 ignore start */
|
||||
let wrapWidth = col.width || 0;
|
||||
/* c8 ignore stop */
|
||||
if (col.padding) {
|
||||
wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0);
|
||||
}
|
||||
if (col.border) {
|
||||
wrapWidth -= 4;
|
||||
}
|
||||
return wrapWidth;
|
||||
}
|
||||
columnWidths(row) {
|
||||
if (!this.wrap) {
|
||||
return row.map(col => {
|
||||
return col.width || mixin.stringWidth(col.text);
|
||||
});
|
||||
}
|
||||
let unset = row.length;
|
||||
let remainingWidth = this.width;
|
||||
// column widths can be set in config.
|
||||
const widths = row.map(col => {
|
||||
if (col.width) {
|
||||
unset--;
|
||||
remainingWidth -= col.width;
|
||||
return col.width;
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
// any unset widths should be calculated.
|
||||
/* c8 ignore start */
|
||||
const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0;
|
||||
/* c8 ignore stop */
|
||||
return widths.map((w, i) => {
|
||||
if (w === undefined) {
|
||||
return Math.max(unsetWidth, _minWidth(row[i]));
|
||||
}
|
||||
return w;
|
||||
});
|
||||
}
|
||||
}
|
||||
function addBorder(col, ts, style) {
|
||||
if (col.border) {
|
||||
if (/[.']-+[.']/.test(ts)) {
|
||||
return '';
|
||||
}
|
||||
if (ts.trim().length !== 0) {
|
||||
return style;
|
||||
}
|
||||
return ' ';
|
||||
}
|
||||
return '';
|
||||
}
|
||||
// calculates the minimum width of
|
||||
// a column, based on padding preferences.
|
||||
function _minWidth(col) {
|
||||
const padding = col.padding || [];
|
||||
const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0);
|
||||
if (col.border) {
|
||||
return minWidth + 4;
|
||||
}
|
||||
return minWidth;
|
||||
}
|
||||
function getWindowWidth() {
|
||||
/* c8 ignore start */
|
||||
if (typeof process === 'object' && process.stdout && process.stdout.columns) {
|
||||
return process.stdout.columns;
|
||||
}
|
||||
return 80;
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
function alignRight(str, width) {
|
||||
str = str.trim();
|
||||
const strWidth = mixin.stringWidth(str);
|
||||
if (strWidth < width) {
|
||||
return ' '.repeat(width - strWidth) + str;
|
||||
}
|
||||
return str;
|
||||
}
|
||||
function alignCenter(str, width) {
|
||||
str = str.trim();
|
||||
const strWidth = mixin.stringWidth(str);
|
||||
/* c8 ignore start */
|
||||
if (strWidth >= width) {
|
||||
return str;
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
return ' '.repeat((width - strWidth) >> 1) + str;
|
||||
}
|
||||
let mixin;
|
||||
function cliui(opts, _mixin) {
|
||||
mixin = _mixin;
|
||||
return new UI({
|
||||
/* c8 ignore start */
|
||||
width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(),
|
||||
wrap: opts === null || opts === void 0 ? void 0 : opts.wrap
|
||||
/* c8 ignore stop */
|
||||
});
|
||||
}
|
||||
|
||||
// Bootstrap cliui with CommonJS dependencies:
|
||||
const stringWidth = require('string-width-cjs');
|
||||
const stripAnsi = require('strip-ansi-cjs');
|
||||
const wrap = require('wrap-ansi-cjs');
|
||||
function ui(opts) {
|
||||
return cliui(opts, {
|
||||
stringWidth,
|
||||
stripAnsi,
|
||||
wrap
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = ui;
|
43
frontend/node_modules/@isaacs/cliui/build/index.d.cts
generated
vendored
Normal file
@@ -0,0 +1,43 @@
interface UIOptions {
    width: number;
    wrap?: boolean;
    rows?: string[];
}
interface Column {
    text: string;
    width?: number;
    align?: "right" | "left" | "center";
    padding: number[];
    border?: boolean;
}
interface ColumnArray extends Array<Column> {
    span: boolean;
}
interface Line {
    hidden?: boolean;
    text: string;
    span?: boolean;
}
declare class UI {
    width: number;
    wrap: boolean;
    rows: ColumnArray[];
    constructor(opts: UIOptions);
    span(...args: ColumnArray): void;
    resetOutput(): void;
    div(...args: (Column | string)[]): ColumnArray;
    private shouldApplyLayoutDSL;
    private applyLayoutDSL;
    private colFromString;
    private measurePadding;
    toString(): string;
    rowToString(row: ColumnArray, lines: Line[]): Line[];
    // if the full 'source' can render in
    // the target line, do so.
    private renderInline;
    private rasterize;
    private negatePadding;
    private columnWidths;
}
declare function ui(opts: UIOptions): UI;
export { ui as default };
302
frontend/node_modules/@isaacs/cliui/build/lib/index.js
generated
vendored
Normal file
@@ -0,0 +1,302 @@
'use strict';
|
||||
const align = {
|
||||
right: alignRight,
|
||||
center: alignCenter
|
||||
};
|
||||
const top = 0;
|
||||
const right = 1;
|
||||
const bottom = 2;
|
||||
const left = 3;
|
||||
export class UI {
|
||||
constructor(opts) {
|
||||
var _a;
|
||||
this.width = opts.width;
|
||||
/* c8 ignore start */
|
||||
this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? _a : true;
|
||||
/* c8 ignore stop */
|
||||
this.rows = [];
|
||||
}
|
||||
span(...args) {
|
||||
const cols = this.div(...args);
|
||||
cols.span = true;
|
||||
}
|
||||
resetOutput() {
|
||||
this.rows = [];
|
||||
}
|
||||
div(...args) {
|
||||
if (args.length === 0) {
|
||||
this.div('');
|
||||
}
|
||||
if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') {
|
||||
return this.applyLayoutDSL(args[0]);
|
||||
}
|
||||
const cols = args.map(arg => {
|
||||
if (typeof arg === 'string') {
|
||||
return this.colFromString(arg);
|
||||
}
|
||||
return arg;
|
||||
});
|
||||
this.rows.push(cols);
|
||||
return cols;
|
||||
}
|
||||
shouldApplyLayoutDSL(...args) {
|
||||
return args.length === 1 && typeof args[0] === 'string' &&
|
||||
/[\t\n]/.test(args[0]);
|
||||
}
|
||||
applyLayoutDSL(str) {
|
||||
const rows = str.split('\n').map(row => row.split('\t'));
|
||||
let leftColumnWidth = 0;
|
||||
// simple heuristic for layout, make sure the
|
||||
// second column lines up along the left-hand.
|
||||
// don't allow the first column to take up more
|
||||
// than 50% of the screen.
|
||||
rows.forEach(columns => {
|
||||
if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) {
|
||||
leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0]));
|
||||
}
|
||||
});
|
||||
// generate a table:
|
||||
// replacing ' ' with padding calculations.
|
||||
// using the algorithmically generated width.
|
||||
rows.forEach(columns => {
|
||||
this.div(...columns.map((r, i) => {
|
||||
return {
|
||||
text: r.trim(),
|
||||
padding: this.measurePadding(r),
|
||||
width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined
|
||||
};
|
||||
}));
|
||||
});
|
||||
return this.rows[this.rows.length - 1];
|
||||
}
|
||||
colFromString(text) {
|
||||
return {
|
||||
text,
|
||||
padding: this.measurePadding(text)
|
||||
};
|
||||
}
|
||||
measurePadding(str) {
|
||||
// measure padding without ansi escape codes
|
||||
const noAnsi = mixin.stripAnsi(str);
|
||||
return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length];
|
||||
}
|
||||
toString() {
|
||||
const lines = [];
|
||||
this.rows.forEach(row => {
|
||||
this.rowToString(row, lines);
|
||||
});
|
||||
// don't display any lines with the
|
||||
// hidden flag set.
|
||||
return lines
|
||||
.filter(line => !line.hidden)
|
||||
.map(line => line.text)
|
||||
.join('\n');
|
||||
}
|
||||
rowToString(row, lines) {
|
||||
this.rasterize(row).forEach((rrow, r) => {
|
||||
let str = '';
|
||||
rrow.forEach((col, c) => {
|
||||
const { width } = row[c]; // the width with padding.
|
||||
const wrapWidth = this.negatePadding(row[c]); // the width without padding.
|
||||
let ts = col; // temporary string used during alignment/padding.
|
||||
if (wrapWidth > mixin.stringWidth(col)) {
|
||||
ts += ' '.repeat(wrapWidth - mixin.stringWidth(col));
|
||||
}
|
||||
// align the string within its column.
|
||||
if (row[c].align && row[c].align !== 'left' && this.wrap) {
|
||||
const fn = align[row[c].align];
|
||||
ts = fn(ts, wrapWidth);
|
||||
if (mixin.stringWidth(ts) < wrapWidth) {
|
||||
/* c8 ignore start */
|
||||
const w = width || 0;
|
||||
/* c8 ignore stop */
|
||||
ts += ' '.repeat(w - mixin.stringWidth(ts) - 1);
|
||||
}
|
||||
}
|
||||
// apply border and padding to string.
|
||||
const padding = row[c].padding || [0, 0, 0, 0];
|
||||
if (padding[left]) {
|
||||
str += ' '.repeat(padding[left]);
|
||||
}
|
||||
str += addBorder(row[c], ts, '| ');
|
||||
str += ts;
|
||||
str += addBorder(row[c], ts, ' |');
|
||||
if (padding[right]) {
|
||||
str += ' '.repeat(padding[right]);
|
||||
}
|
||||
// if prior row is span, try to render the
|
||||
// current row on the prior line.
|
||||
if (r === 0 && lines.length > 0) {
|
||||
str = this.renderInline(str, lines[lines.length - 1]);
|
||||
}
|
||||
});
|
||||
// remove trailing whitespace.
|
||||
lines.push({
|
||||
text: str.replace(/ +$/, ''),
|
||||
span: row.span
|
||||
});
|
||||
});
|
||||
return lines;
|
||||
}
|
||||
// if the full 'source' can render in
|
||||
// the target line, do so.
|
||||
renderInline(source, previousLine) {
|
||||
const match = source.match(/^ */);
|
||||
/* c8 ignore start */
|
||||
const leadingWhitespace = match ? match[0].length : 0;
|
||||
/* c8 ignore stop */
|
||||
const target = previousLine.text;
|
||||
const targetTextWidth = mixin.stringWidth(target.trimEnd());
|
||||
if (!previousLine.span) {
|
||||
return source;
|
||||
}
|
||||
// if we're not applying wrapping logic,
|
||||
// just always append to the span.
|
||||
if (!this.wrap) {
|
||||
previousLine.hidden = true;
|
||||
return target + source;
|
||||
}
|
||||
if (leadingWhitespace < targetTextWidth) {
|
||||
return source;
|
||||
}
|
||||
previousLine.hidden = true;
|
||||
return target.trimEnd() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimStart();
|
||||
}
|
||||
rasterize(row) {
|
||||
const rrows = [];
|
||||
const widths = this.columnWidths(row);
|
||||
let wrapped;
|
||||
// word wrap all columns, and create
|
||||
// a data-structure that is easy to rasterize.
|
||||
row.forEach((col, c) => {
|
||||
// leave room for left and right padding.
|
||||
col.width = widths[c];
|
||||
if (this.wrap) {
|
||||
wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n');
|
||||
}
|
||||
else {
|
||||
wrapped = col.text.split('\n');
|
||||
}
|
||||
if (col.border) {
|
||||
wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.');
|
||||
wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'");
|
||||
}
|
||||
// add top and bottom padding.
|
||||
if (col.padding) {
|
||||
wrapped.unshift(...new Array(col.padding[top] || 0).fill(''));
|
||||
wrapped.push(...new Array(col.padding[bottom] || 0).fill(''));
|
||||
}
|
||||
wrapped.forEach((str, r) => {
|
||||
if (!rrows[r]) {
|
||||
rrows.push([]);
|
||||
}
|
||||
const rrow = rrows[r];
|
||||
for (let i = 0; i < c; i++) {
|
||||
if (rrow[i] === undefined) {
|
||||
rrow.push('');
|
||||
}
|
||||
}
|
||||
rrow.push(str);
|
||||
});
|
||||
});
|
||||
return rrows;
|
||||
}
|
||||
negatePadding(col) {
|
||||
/* c8 ignore start */
|
||||
let wrapWidth = col.width || 0;
|
||||
/* c8 ignore stop */
|
||||
if (col.padding) {
|
||||
wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0);
|
||||
}
|
||||
if (col.border) {
|
||||
wrapWidth -= 4;
|
||||
}
|
||||
return wrapWidth;
|
||||
}
|
||||
columnWidths(row) {
|
||||
if (!this.wrap) {
|
||||
return row.map(col => {
|
||||
return col.width || mixin.stringWidth(col.text);
|
||||
});
|
||||
}
|
||||
let unset = row.length;
|
||||
let remainingWidth = this.width;
|
||||
// column widths can be set in config.
|
||||
const widths = row.map(col => {
|
||||
if (col.width) {
|
||||
unset--;
|
||||
remainingWidth -= col.width;
|
||||
return col.width;
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
// any unset widths should be calculated.
|
||||
/* c8 ignore start */
|
||||
const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0;
|
||||
/* c8 ignore stop */
|
||||
return widths.map((w, i) => {
|
||||
if (w === undefined) {
|
||||
return Math.max(unsetWidth, _minWidth(row[i]));
|
||||
}
|
||||
return w;
|
||||
});
|
||||
}
|
||||
}
|
||||
function addBorder(col, ts, style) {
|
||||
if (col.border) {
|
||||
if (/[.']-+[.']/.test(ts)) {
|
||||
return '';
|
||||
}
|
||||
if (ts.trim().length !== 0) {
|
||||
return style;
|
||||
}
|
||||
return ' ';
|
||||
}
|
||||
return '';
|
||||
}
|
||||
// calculates the minimum width of
|
||||
// a column, based on padding preferences.
|
||||
function _minWidth(col) {
|
||||
const padding = col.padding || [];
|
||||
const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0);
|
||||
if (col.border) {
|
||||
return minWidth + 4;
|
||||
}
|
||||
return minWidth;
|
||||
}
|
||||
function getWindowWidth() {
|
||||
/* c8 ignore start */
|
||||
if (typeof process === 'object' && process.stdout && process.stdout.columns) {
|
||||
return process.stdout.columns;
|
||||
}
|
||||
return 80;
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
function alignRight(str, width) {
|
||||
str = str.trim();
|
||||
const strWidth = mixin.stringWidth(str);
|
||||
if (strWidth < width) {
|
||||
return ' '.repeat(width - strWidth) + str;
|
||||
}
|
||||
return str;
|
||||
}
|
||||
function alignCenter(str, width) {
|
||||
str = str.trim();
|
||||
const strWidth = mixin.stringWidth(str);
|
||||
/* c8 ignore start */
|
||||
if (strWidth >= width) {
|
||||
return str;
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
return ' '.repeat((width - strWidth) >> 1) + str;
|
||||
}
|
||||
let mixin;
|
||||
export function cliui(opts, _mixin) {
|
||||
mixin = _mixin;
|
||||
return new UI({
|
||||
/* c8 ignore start */
|
||||
width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(),
|
||||
wrap: opts === null || opts === void 0 ? void 0 : opts.wrap
|
||||
/* c8 ignore stop */
|
||||
});
|
||||
}
|
14
frontend/node_modules/@isaacs/cliui/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,14 @@
// Bootstrap cliui with ESM dependencies:
import { cliui } from './build/lib/index.js'

import stringWidth from 'string-width'
import stripAnsi from 'strip-ansi'
import wrap from 'wrap-ansi'

export default function ui (opts) {
  return cliui(opts, {
    stringWidth,
    stripAnsi,
    wrap
  })
}
86
frontend/node_modules/@isaacs/cliui/package.json
generated
vendored
Normal file
@@ -0,0 +1,86 @@
{
  "name": "@isaacs/cliui",
  "version": "8.0.2",
  "description": "easily create complex multi-column command-line-interfaces",
  "main": "build/index.cjs",
  "exports": {
    ".": [
      {
        "import": "./index.mjs",
        "require": "./build/index.cjs"
      },
      "./build/index.cjs"
    ]
  },
  "type": "module",
  "module": "./index.mjs",
  "scripts": {
    "check": "standardx '**/*.ts' && standardx '**/*.js' && standardx '**/*.cjs'",
    "fix": "standardx --fix '**/*.ts' && standardx --fix '**/*.js' && standardx --fix '**/*.cjs'",
    "pretest": "rimraf build && tsc -p tsconfig.test.json && cross-env NODE_ENV=test npm run build:cjs",
    "test": "c8 mocha ./test/*.cjs",
    "test:esm": "c8 mocha ./test/**/*.mjs",
    "postest": "check",
    "coverage": "c8 report --check-coverage",
    "precompile": "rimraf build",
    "compile": "tsc",
    "postcompile": "npm run build:cjs",
    "build:cjs": "rollup -c",
    "prepare": "npm run compile"
  },
  "repository": "yargs/cliui",
  "standard": {
    "ignore": [
      "**/example/**"
    ],
    "globals": [
      "it"
    ]
  },
  "keywords": [
    "cli",
    "command-line",
    "layout",
    "design",
    "console",
    "wrap",
    "table"
  ],
  "author": "Ben Coe <ben@npmjs.com>",
  "license": "ISC",
  "dependencies": {
    "string-width": "^5.1.2",
    "string-width-cjs": "npm:string-width@^4.2.0",
    "strip-ansi": "^7.0.1",
    "strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
    "wrap-ansi": "^8.1.0",
    "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
  },
  "devDependencies": {
    "@types/node": "^14.0.27",
    "@typescript-eslint/eslint-plugin": "^4.0.0",
    "@typescript-eslint/parser": "^4.0.0",
    "c8": "^7.3.0",
    "chai": "^4.2.0",
    "chalk": "^4.1.0",
    "cross-env": "^7.0.2",
    "eslint": "^7.6.0",
    "eslint-plugin-import": "^2.22.0",
    "eslint-plugin-node": "^11.1.0",
    "gts": "^3.0.0",
    "mocha": "^10.0.0",
    "rimraf": "^3.0.2",
    "rollup": "^2.23.1",
    "rollup-plugin-ts": "^3.0.2",
    "standardx": "^7.0.0",
    "typescript": "^4.0.0"
  },
  "files": [
    "build",
    "index.mjs",
    "!*.d.ts"
  ],
  "engines": {
    "node": ">=12"
  }
}
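The `exports` map above is what gives the package its dual entry points: the `require` condition resolves to the Rollup-built `./build/index.cjs`, while `import` resolves to `./index.mjs`. A small sketch of the CommonJS side (the ESM side is shown only as a comment); exact resolution behaviour depends on the consuming Node or bundler version:

```js
// CommonJS: the "require" condition resolves to ./build/index.cjs
const cliui = require('@isaacs/cliui');

// ESM consumers would instead hit the "import" condition (./index.mjs):
//   import cliui from '@isaacs/cliui';

const ui = cliui({ width: 50 });
ui.div('same API from either entry point');
console.log(ui.toString());
```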
19
frontend/node_modules/@jridgewell/gen-mapping/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
227
frontend/node_modules/@jridgewell/gen-mapping/README.md
generated
vendored
Normal file
@@ -0,0 +1,227 @@
# @jridgewell/gen-mapping

> Generate source maps

`gen-mapping` allows you to generate a source map during transpilation or minification.
With a source map, you're able to trace the original location in the source file, either in Chrome's
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping].

You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This
provides the same `addMapping` and `setSourceContent` API.

## Installation

```sh
npm install @jridgewell/gen-mapping
```

## Usage

```typescript
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';

const map = new GenMapping({
  file: 'output.js',
  sourceRoot: 'https://example.com/',
});

setSourceContent(map, 'input.js', `function foo() {}`);

addMapping(map, {
  // Lines start at line 1, columns at column 0.
  generated: { line: 1, column: 0 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});

addMapping(map, {
  generated: { line: 1, column: 9 },
  source: 'input.js',
  original: { line: 1, column: 9 },
  name: 'foo',
});

assert.deepEqual(toDecodedMap(map), {
  version: 3,
  file: 'output.js',
  names: ['foo'],
  sourceRoot: 'https://example.com/',
  sources: ['input.js'],
  sourcesContent: ['function foo() {}'],
  mappings: [
    [ [0, 0, 0, 0], [9, 0, 0, 9, 0] ]
  ],
});

assert.deepEqual(toEncodedMap(map), {
  version: 3,
  file: 'output.js',
  names: ['foo'],
  sourceRoot: 'https://example.com/',
  sources: ['input.js'],
  sourcesContent: ['function foo() {}'],
  mappings: 'AAAA,SAASA',
});
```

### Smaller Sourcemaps

Not everything needs to be added to a sourcemap, and needless markings can cause significantly
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will
intelligently determine if this marking adds useful information. If not, the marking will be
skipped.

```typescript
import { maybeAddMapping } from '@jridgewell/gen-mapping';

const map = new GenMapping();

// Adding a sourceless marking at the beginning of a line isn't useful.
maybeAddMapping(map, {
  generated: { line: 1, column: 0 },
});

// Adding a new source marking is useful.
maybeAddMapping(map, {
  generated: { line: 1, column: 0 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});

// But adding another marking pointing to the exact same original location isn't, even if the
// generated column changed.
maybeAddMapping(map, {
  generated: { line: 1, column: 9 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});

assert.deepEqual(toEncodedMap(map), {
  version: 3,
  names: [],
  sources: ['input.js'],
  sourcesContent: [null],
  mappings: 'AAAA',
});
```

## Benchmarks

```
node v18.0.0

amp.js.map
Memory Usage:
  gen-mapping: addSegment      5852872 bytes
  gen-mapping: addMapping      7716042 bytes
  source-map-js                6143250 bytes
  source-map-0.6.1             6124102 bytes
  source-map-0.8.0             6121173 bytes
Smallest memory usage is gen-mapping: addSegment

Adding speed:
  gen-mapping:      addSegment x 441 ops/sec ±2.07% (90 runs sampled)
  gen-mapping:      addMapping x 350 ops/sec ±2.40% (86 runs sampled)
  source-map-js:    addMapping x 169 ops/sec ±2.42% (80 runs sampled)
  source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled)
  source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
  gen-mapping:      decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled)
  gen-mapping:      encoded output x 663 ops/sec ±0.22% (98 runs sampled)
  source-map-js:    encoded output x 197 ops/sec ±0.45% (84 runs sampled)
  source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled)
  source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled)
Fastest is gen-mapping: decoded output


***


babel.min.js.map
Memory Usage:
  gen-mapping: addSegment     37578063 bytes
  gen-mapping: addMapping     37212897 bytes
  source-map-js               47638527 bytes
  source-map-0.6.1            47690503 bytes
  source-map-0.8.0            47470188 bytes
Smallest memory usage is gen-mapping: addMapping

Adding speed:
  gen-mapping:      addSegment x 31.05 ops/sec ±8.31% (43 runs sampled)
  gen-mapping:      addMapping x 29.83 ops/sec ±7.36% (51 runs sampled)
  source-map-js:    addMapping x 20.73 ops/sec ±6.22% (38 runs sampled)
  source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled)
  source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
  gen-mapping:      decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled)
  gen-mapping:      encoded output x 95.15 ops/sec ±2.98% (72 runs sampled)
  source-map-js:    encoded output x 15.20 ops/sec ±7.41% (33 runs sampled)
  source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled)
  source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled)
Fastest is gen-mapping: decoded output


***


preact.js.map
Memory Usage:
  gen-mapping: addSegment       416247 bytes
  gen-mapping: addMapping       419824 bytes
  source-map-js                1024619 bytes
  source-map-0.6.1             1146004 bytes
  source-map-0.8.0             1113250 bytes
Smallest memory usage is gen-mapping: addSegment

Adding speed:
  gen-mapping:      addSegment x 13,755 ops/sec ±0.15% (98 runs sampled)
  gen-mapping:      addMapping x 13,013 ops/sec ±0.11% (101 runs sampled)
  source-map-js:    addMapping x 4,564 ops/sec ±0.21% (98 runs sampled)
  source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled)
  source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
  gen-mapping:      decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled)
  gen-mapping:      encoded output x 14,368 ops/sec ±4.07% (82 runs sampled)
  source-map-js:    encoded output x 5,261 ops/sec ±0.21% (99 runs sampled)
  source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled)
  source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled)
Fastest is gen-mapping: decoded output


***


react.js.map
Memory Usage:
  gen-mapping: addSegment       975096 bytes
  gen-mapping: addMapping      1102981 bytes
  source-map-js                2918836 bytes
  source-map-0.6.1             2885435 bytes
  source-map-0.8.0             2874336 bytes
Smallest memory usage is gen-mapping: addSegment

Adding speed:
  gen-mapping:      addSegment x 4,772 ops/sec ±0.15% (100 runs sampled)
  gen-mapping:      addMapping x 4,456 ops/sec ±0.13% (97 runs sampled)
  source-map-js:    addMapping x 1,618 ops/sec ±0.24% (97 runs sampled)
  source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled)
  source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
  gen-mapping:      decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled)
  gen-mapping:      encoded output x 5,421 ops/sec ±1.60% (89 runs sampled)
  source-map-js:    encoded output x 2,113 ops/sec ±1.81% (98 runs sampled)
  source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled)
  source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled)
Fastest is gen-mapping: decoded output
```

[source-map]: https://www.npmjs.com/package/source-map
[trace-mapping]: https://github.com/jridgewell/trace-mapping
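Putting the README's pieces together, a toy "transpiler" loop might look like the sketch below (not from the vendored package; the token list is fabricated for illustration, and only APIs documented above are used):

```js
import { GenMapping, maybeAddMapping, setSourceContent, toEncodedMap } from '@jridgewell/gen-mapping';

const source = 'const answer = 42;';
const map = new GenMapping({ file: 'out.js' });
setSourceContent(map, 'input.js', source);

// Hypothetical tokens: each records where it landed in the generated output.
const tokens = [
  { genColumn: 0, srcColumn: 0 },                 // `const`
  { genColumn: 6, srcColumn: 6, name: 'answer' }, // identifier keeps its name
];

for (const token of tokens) {
  // maybeAddMapping skips markings that add no information (see above).
  maybeAddMapping(map, {
    generated: { line: 1, column: token.genColumn },
    source: 'input.js',
    original: { line: 1, column: token.srcColumn },
    name: token.name,
  });
}

console.log(JSON.stringify(toEncodedMap(map)));
```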
230
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
generated
vendored
Normal file
@@ -0,0 +1,230 @@
import { SetArray, put, remove } from '@jridgewell/set-array';
|
||||
import { encode } from '@jridgewell/sourcemap-codec';
|
||||
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
|
||||
const NO_NAME = -1;
|
||||
/**
|
||||
* Provides the state to generate a sourcemap.
|
||||
*/
|
||||
class GenMapping {
|
||||
constructor({ file, sourceRoot } = {}) {
|
||||
this._names = new SetArray();
|
||||
this._sources = new SetArray();
|
||||
this._sourcesContent = [];
|
||||
this._mappings = [];
|
||||
this.file = file;
|
||||
this.sourceRoot = sourceRoot;
|
||||
this._ignoreList = new SetArray();
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||
* with public access modifiers.
|
||||
*/
|
||||
function cast(map) {
|
||||
return map;
|
||||
}
|
||||
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||
}
|
||||
function addMapping(map, mapping) {
|
||||
return addMappingInternal(false, map, mapping);
|
||||
}
|
||||
/**
|
||||
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||
* not add a segment with a lower generated line/column than one that came before.
|
||||
*/
|
||||
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||
};
|
||||
/**
|
||||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||
* not add a mapping with a lower generated line/column than one that came before.
|
||||
*/
|
||||
const maybeAddMapping = (map, mapping) => {
|
||||
return addMappingInternal(true, map, mapping);
|
||||
};
|
||||
/**
|
||||
* Adds/removes the content of the source file to the source map.
|
||||
*/
|
||||
function setSourceContent(map, source, content) {
|
||||
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
|
||||
const index = put(sources, source);
|
||||
sourcesContent[index] = content;
|
||||
}
|
||||
function setIgnore(map, source, ignore = true) {
|
||||
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
|
||||
const index = put(sources, source);
|
||||
if (index === sourcesContent.length)
|
||||
sourcesContent[index] = null;
|
||||
if (ignore)
|
||||
put(ignoreList, index);
|
||||
else
|
||||
remove(ignoreList, index);
|
||||
}
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
function toDecodedMap(map) {
|
||||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
|
||||
removeEmptyFinalLines(mappings);
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file || undefined,
|
||||
names: names.array,
|
||||
sourceRoot: map.sourceRoot || undefined,
|
||||
sources: sources.array,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
ignoreList: ignoreList.array,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
function toEncodedMap(map) {
|
||||
const decoded = toDecodedMap(map);
|
||||
return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) });
|
||||
}
|
||||
/**
|
||||
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||
*/
|
||||
function fromMap(input) {
|
||||
const map = new TraceMap(input);
|
||||
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
||||
putAll(cast(gen)._names, map.names);
|
||||
putAll(cast(gen)._sources, map.sources);
|
||||
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
||||
cast(gen)._mappings = decodedMappings(map);
|
||||
if (map.ignoreList)
|
||||
putAll(cast(gen)._ignoreList, map.ignoreList);
|
||||
return gen;
|
||||
}
|
||||
/**
|
||||
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||
* passed to the `source-map` library.
|
||||
*/
|
||||
function allMappings(map) {
|
||||
const out = [];
|
||||
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
|
||||
for (let i = 0; i < mappings.length; i++) {
|
||||
const line = mappings[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||
let source = undefined;
|
||||
let original = undefined;
|
||||
let name = undefined;
|
||||
if (seg.length !== 1) {
|
||||
source = sources.array[seg[SOURCES_INDEX]];
|
||||
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||
if (seg.length === 5)
|
||||
name = names.array[seg[NAMES_INDEX]];
|
||||
}
|
||||
out.push({ generated, source, original, name });
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
// This split declaration is only so that terser can eliminate the static initialization block.
|
||||
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
|
||||
const line = getLine(mappings, genLine);
|
||||
const index = getColumnIndex(line, genColumn);
|
||||
if (!source) {
|
||||
if (skipable && skipSourceless(line, index))
|
||||
return;
|
||||
return insert(line, index, [genColumn]);
|
||||
}
|
||||
const sourcesIndex = put(sources, source);
|
||||
const namesIndex = name ? put(names, name) : NO_NAME;
|
||||
if (sourcesIndex === sourcesContent.length)
|
||||
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
|
||||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||
return;
|
||||
}
|
||||
return insert(line, index, name
|
||||
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
|
||||
}
|
||||
function getLine(mappings, index) {
|
||||
for (let i = mappings.length; i <= index; i++) {
|
||||
mappings[i] = [];
|
||||
}
|
||||
return mappings[index];
|
||||
}
|
||||
function getColumnIndex(line, genColumn) {
|
||||
let index = line.length;
|
||||
for (let i = index - 1; i >= 0; index = i--) {
|
||||
const current = line[i];
|
||||
if (genColumn >= current[COLUMN])
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
function removeEmptyFinalLines(mappings) {
|
||||
const { length } = mappings;
|
||||
let len = length;
|
||||
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||
if (mappings[i].length > 0)
|
||||
break;
|
||||
}
|
||||
if (len < length)
|
||||
mappings.length = len;
|
||||
}
|
||||
function putAll(setarr, array) {
|
||||
for (let i = 0; i < array.length; i++)
|
||||
put(setarr, array[i]);
|
||||
}
|
||||
function skipSourceless(line, index) {
|
||||
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
||||
// doesn't generate any useful information.
|
||||
if (index === 0)
|
||||
return true;
|
||||
const prev = line[index - 1];
|
||||
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
||||
// generate any new information. Else, this segment will end the source/named segment and point to
|
||||
// a sourceless position, which is useful.
|
||||
return prev.length === 1;
|
||||
}
|
||||
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
||||
// A source/named segment at the start of a line gives position at that genColumn
|
||||
if (index === 0)
|
||||
return false;
|
||||
const prev = line[index - 1];
|
||||
// If the previous segment is sourceless, then we're transitioning to a source.
|
||||
if (prev.length === 1)
|
||||
return false;
|
||||
// If the previous segment maps to the exact same source position, then this segment doesn't
|
||||
// provide any new position information.
|
||||
return (sourcesIndex === prev[SOURCES_INDEX] &&
|
||||
sourceLine === prev[SOURCE_LINE] &&
|
||||
sourceColumn === prev[SOURCE_COLUMN] &&
|
||||
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
|
||||
}
|
||||
function addMappingInternal(skipable, map, mapping) {
|
||||
const { generated, source, original, name, content } = mapping;
|
||||
if (!source) {
|
||||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
|
||||
}
|
||||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
|
||||
}
|
||||
|
||||
export { GenMapping, addMapping, addSegment, allMappings, fromMap, maybeAddMapping, maybeAddSegment, setIgnore, setSourceContent, toDecodedMap, toEncodedMap };
|
||||
//# sourceMappingURL=gen-mapping.mjs.map
|
1
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
246
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
Normal file
|
@ -0,0 +1,246 @@
|
|||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/set-array'), require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) :
|
||||
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/set-array', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.genMapping = {}, global.setArray, global.sourcemapCodec, global.traceMapping));
|
||||
})(this, (function (exports, setArray, sourcemapCodec, traceMapping) { 'use strict';
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
|
||||
const NO_NAME = -1;
|
||||
/**
|
||||
* Provides the state to generate a sourcemap.
|
||||
*/
|
||||
class GenMapping {
|
||||
constructor({ file, sourceRoot } = {}) {
|
||||
this._names = new setArray.SetArray();
|
||||
this._sources = new setArray.SetArray();
|
||||
this._sourcesContent = [];
|
||||
this._mappings = [];
|
||||
this.file = file;
|
||||
this.sourceRoot = sourceRoot;
|
||||
this._ignoreList = new setArray.SetArray();
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||
* with public access modifiers.
|
||||
*/
|
||||
function cast(map) {
|
||||
return map;
|
||||
}
|
||||
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||
}
|
||||
function addMapping(map, mapping) {
|
||||
return addMappingInternal(false, map, mapping);
|
||||
}
|
||||
/**
|
||||
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||
* not add a segment with a lower generated line/column than one that came before.
|
||||
*/
|
||||
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||
};
|
||||
/**
|
||||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||
* not add a mapping with a lower generated line/column than one that came before.
|
||||
*/
|
||||
const maybeAddMapping = (map, mapping) => {
|
||||
return addMappingInternal(true, map, mapping);
|
||||
};
|
||||
/**
|
||||
* Adds/removes the content of the source file to the source map.
|
||||
*/
|
||||
function setSourceContent(map, source, content) {
|
||||
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
|
||||
const index = setArray.put(sources, source);
|
||||
sourcesContent[index] = content;
|
||||
}
|
||||
function setIgnore(map, source, ignore = true) {
|
||||
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
|
||||
const index = setArray.put(sources, source);
|
||||
if (index === sourcesContent.length)
|
||||
sourcesContent[index] = null;
|
||||
if (ignore)
|
||||
setArray.put(ignoreList, index);
|
||||
else
|
||||
setArray.remove(ignoreList, index);
|
||||
}
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
function toDecodedMap(map) {
|
||||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
|
||||
removeEmptyFinalLines(mappings);
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file || undefined,
|
||||
names: names.array,
|
||||
sourceRoot: map.sourceRoot || undefined,
|
||||
sources: sources.array,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
ignoreList: ignoreList.array,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
function toEncodedMap(map) {
|
||||
const decoded = toDecodedMap(map);
|
||||
return Object.assign(Object.assign({}, decoded), { mappings: sourcemapCodec.encode(decoded.mappings) });
|
||||
}
|
||||
/**
|
||||
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||
*/
|
||||
function fromMap(input) {
|
||||
const map = new traceMapping.TraceMap(input);
|
||||
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
||||
putAll(cast(gen)._names, map.names);
|
||||
putAll(cast(gen)._sources, map.sources);
|
||||
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
||||
cast(gen)._mappings = traceMapping.decodedMappings(map);
|
||||
if (map.ignoreList)
|
||||
putAll(cast(gen)._ignoreList, map.ignoreList);
|
||||
return gen;
|
||||
}
|
||||
/**
|
||||
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||
* passed to the `source-map` library.
|
||||
*/
|
||||
function allMappings(map) {
|
||||
const out = [];
|
||||
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
|
||||
for (let i = 0; i < mappings.length; i++) {
|
||||
const line = mappings[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||
let source = undefined;
|
||||
let original = undefined;
|
||||
let name = undefined;
|
||||
if (seg.length !== 1) {
|
||||
source = sources.array[seg[SOURCES_INDEX]];
|
||||
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||
if (seg.length === 5)
|
||||
name = names.array[seg[NAMES_INDEX]];
|
||||
}
|
||||
out.push({ generated, source, original, name });
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
// This split declaration is only so that terser can eliminate the static initialization block.
|
||||
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
|
||||
const line = getLine(mappings, genLine);
|
||||
const index = getColumnIndex(line, genColumn);
|
||||
if (!source) {
|
||||
if (skipable && skipSourceless(line, index))
|
||||
return;
|
||||
return insert(line, index, [genColumn]);
|
||||
}
|
||||
const sourcesIndex = setArray.put(sources, source);
|
||||
const namesIndex = name ? setArray.put(names, name) : NO_NAME;
|
||||
if (sourcesIndex === sourcesContent.length)
|
||||
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
|
||||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||
return;
|
||||
}
|
||||
return insert(line, index, name
|
||||
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
|
||||
}
|
||||
function getLine(mappings, index) {
|
||||
for (let i = mappings.length; i <= index; i++) {
|
||||
mappings[i] = [];
|
||||
}
|
||||
return mappings[index];
|
||||
}
|
||||
function getColumnIndex(line, genColumn) {
|
||||
let index = line.length;
|
||||
for (let i = index - 1; i >= 0; index = i--) {
|
||||
const current = line[i];
|
||||
if (genColumn >= current[COLUMN])
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
function removeEmptyFinalLines(mappings) {
|
||||
const { length } = mappings;
|
||||
let len = length;
|
||||
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||
if (mappings[i].length > 0)
|
||||
break;
|
||||
}
|
||||
if (len < length)
|
||||
mappings.length = len;
|
||||
}
|
||||
function putAll(setarr, array) {
|
||||
for (let i = 0; i < array.length; i++)
|
||||
setArray.put(setarr, array[i]);
|
||||
}
|
||||
function skipSourceless(line, index) {
|
||||
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
||||
// doesn't generate any useful information.
|
||||
if (index === 0)
|
||||
return true;
|
||||
const prev = line[index - 1];
|
||||
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
||||
// generate any new information. Else, this segment will end the source/named segment and point to
|
||||
// a sourceless position, which is useful.
|
||||
return prev.length === 1;
|
||||
}
|
||||
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
||||
// A source/named segment at the start of a line gives position at that genColumn
|
||||
if (index === 0)
|
||||
return false;
|
||||
const prev = line[index - 1];
|
||||
// If the previous segment is sourceless, then we're transitioning to a source.
|
||||
if (prev.length === 1)
|
||||
return false;
|
||||
// If the previous segment maps to the exact same source position, then this segment doesn't
|
||||
// provide any new position information.
|
||||
return (sourcesIndex === prev[SOURCES_INDEX] &&
|
||||
sourceLine === prev[SOURCE_LINE] &&
|
||||
sourceColumn === prev[SOURCE_COLUMN] &&
|
||||
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
|
||||
}
|
||||
function addMappingInternal(skipable, map, mapping) {
|
||||
const { generated, source, original, name, content } = mapping;
|
||||
if (!source) {
|
||||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
|
||||
}
|
||||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
|
||||
}
|
||||
|
||||
exports.GenMapping = GenMapping;
|
||||
exports.addMapping = addMapping;
|
||||
exports.addSegment = addSegment;
|
||||
exports.allMappings = allMappings;
|
||||
exports.fromMap = fromMap;
|
||||
exports.maybeAddMapping = maybeAddMapping;
|
||||
exports.maybeAddSegment = maybeAddSegment;
|
||||
exports.setIgnore = setIgnore;
|
||||
exports.setSourceContent = setSourceContent;
|
||||
exports.toDecodedMap = toDecodedMap;
|
||||
exports.toEncodedMap = toEncodedMap;
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=gen-mapping.umd.js.map
|
1
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
88
frontend/node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
generated
vendored
Normal file
@@ -0,0 +1,88 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export declare type Options = {
    file?: string | null;
    sourceRoot?: string | null;
};
/**
 * Provides the state to generate a sourcemap.
 */
export declare class GenMapping {
    private _names;
    private _sources;
    private _sourcesContent;
    private _mappings;
    private _ignoreList;
    file: string | null | undefined;
    sourceRoot: string | null | undefined;
    constructor({ file, sourceRoot }?: Options);
}
/**
 * A low-level API to associate a generated position with an original source position. Line and
 * column here are 0-based, unlike `addMapping`.
 */
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
/**
 * A high-level API to associate a generated position with an original source position. Line is
 * 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
 */
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source?: null;
    original?: null;
    name?: null;
    content?: null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source: string;
    original: Pos;
    name?: null;
    content?: string | null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source: string;
    original: Pos;
    name: string;
    content?: string | null;
}): void;
/**
 * Same as `addSegment`, but will only add the segment if it generates useful information in the
 * resulting map. This only works correctly if segments are added **in order**, meaning you should
 * not add a segment with a lower generated line/column than one that came before.
 */
export declare const maybeAddSegment: typeof addSegment;
/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
export declare const maybeAddMapping: typeof addMapping;
/**
 * Adds/removes the content of the source file to the source map.
 */
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
/**
 * Constructs a new GenMapping, using the already present mappings of the input.
 */
export declare function fromMap(input: SourceMapInput): GenMapping;
/**
 * Returns an array of high-level mapping objects for every recorded segment, which could then be
 * passed to the `source-map` library.
 */
export declare function allMappings(map: GenMapping): Mapping[];
12
frontend/node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
36
frontend/node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,36 @@
import type { SourceMapSegment } from './sourcemap-segment';
export interface SourceMapV3 {
    file?: string | null;
    names: readonly string[];
    sourceRoot?: string;
    sources: readonly (string | null)[];
    sourcesContent?: readonly (string | null)[];
    version: 3;
    ignoreList?: readonly number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: readonly SourceMapSegment[][];
}
export interface Pos {
    line: number;
    column: number;
}
export declare type Mapping = {
    generated: Pos;
    source: undefined;
    original: undefined;
    name: undefined;
} | {
    generated: Pos;
    source: string;
    original: Pos;
    name: string;
} | {
    generated: Pos;
    source: string;
    original: Pos;
    name: undefined;
};
76
frontend/node_modules/@jridgewell/gen-mapping/package.json
generated
vendored
Normal file
|
@ -0,0 +1,76 @@
|
|||
{
|
||||
"name": "@jridgewell/gen-mapping",
|
||||
"version": "0.3.5",
|
||||
"description": "Generate source maps",
|
||||
"keywords": [
|
||||
"source",
|
||||
"map"
|
||||
],
|
||||
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/jridgewell/gen-mapping",
|
||||
"main": "dist/gen-mapping.umd.js",
|
||||
"module": "dist/gen-mapping.mjs",
|
||||
"types": "dist/types/gen-mapping.d.ts",
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"types": "./dist/types/gen-mapping.d.ts",
|
||||
"browser": "./dist/gen-mapping.umd.js",
|
||||
"require": "./dist/gen-mapping.umd.js",
|
||||
"import": "./dist/gen-mapping.mjs"
|
||||
},
|
||||
"./dist/gen-mapping.umd.js"
|
||||
],
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"benchmark": "run-s build:rollup benchmark:*",
|
||||
"benchmark:install": "cd benchmark && npm install",
|
||||
"benchmark:only": "node benchmark/index.mjs",
|
||||
"prebuild": "rm -rf dist",
|
||||
"build": "run-s -n build:*",
|
||||
"build:rollup": "rollup -c rollup.config.js",
|
||||
"build:ts": "tsc --project tsconfig.build.json",
|
||||
"lint": "run-s -n lint:*",
|
||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||
"test": "run-s -n test:lint test:only",
|
||||
"test:debug": "mocha --inspect-brk",
|
||||
"test:lint": "run-s -n test:lint:*",
|
||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||
"test:only": "c8 mocha",
|
||||
"test:watch": "mocha --watch",
|
||||
"prepublishOnly": "npm run preversion",
|
||||
"preversion": "run-s test build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-typescript": "8.3.2",
|
||||
"@types/mocha": "9.1.1",
|
||||
"@types/node": "17.0.29",
|
||||
"@typescript-eslint/eslint-plugin": "5.21.0",
|
||||
"@typescript-eslint/parser": "5.21.0",
|
||||
"benchmark": "2.1.4",
|
||||
"c8": "7.11.2",
|
||||
"eslint": "8.14.0",
|
||||
"eslint-config-prettier": "8.5.0",
|
||||
"mocha": "9.2.2",
|
||||
"npm-run-all": "4.1.5",
|
||||
"prettier": "2.6.2",
|
||||
"rollup": "2.70.2",
|
||||
"tsx": "4.7.1",
|
||||
"typescript": "4.6.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@jridgewell/set-array": "^1.2.1",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.10",
|
||||
"@jridgewell/trace-mapping": "^0.3.24"
|
||||
}
|
||||
}
|
19
frontend/node_modules/@jridgewell/resolve-uri/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
Copyright 2019 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
40
frontend/node_modules/@jridgewell/resolve-uri/README.md
generated
vendored
Normal file
@@ -0,0 +1,40 @@
# @jridgewell/resolve-uri

> Resolve a URI relative to an optional base URI

Resolve any combination of absolute URIs, protocol-relative URIs, absolute paths, or relative paths.

## Installation

```sh
npm install @jridgewell/resolve-uri
```

## Usage

```typescript
function resolve(input: string, base?: string): string;
```

```js
import resolve from '@jridgewell/resolve-uri';

resolve('foo', 'https://example.com'); // => 'https://example.com/foo'
```

| Input | Base | Resolution | Explanation |
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------|
| `https://example.com` | _any_ | `https://example.com/` | Input is normalized only |
| `//example.com` | `https://base.com/` | `https://example.com/` | Input inherits the base's protocol |
| `//example.com` | _rest_ | `//example.com/` | Input is normalized only |
| `/example` | `https://base.com/` | `https://base.com/example` | Input inherits the base's origin |
| `/example` | `//base.com/` | `//base.com/example` | Input inherits the base's host and remains protocol relative |
| `/example` | _rest_ | `/example` | Input is normalized only |
| `example` | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base |
| `example` | `https://base.com/file` | `https://base.com/example` | Input is joined with the base without its file |
| `example` | `//base.com/dir/` | `//base.com/dir/example` | Input is joined with the base's last directory |
| `example` | `//base.com/file` | `//base.com/example` | Input is joined with the base without its file |
| `example` | `/base/dir/` | `/base/dir/example` | Input is joined with the base's last directory |
| `example` | `/base/file` | `/base/example` | Input is joined with the base without its file |
| `example` | `base/dir/` | `base/dir/example` | Input is joined with the base's last directory |
| `example` | `base/file` | `base/example` | Input is joined with the base without its file |
232
frontend/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs
generated
vendored
Normal file
|
@ -0,0 +1,232 @@
|
|||
// Matches the scheme of a URL, eg "http://"
|
||||
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||
/**
|
||||
* Matches the parts of a URL:
|
||||
* 1. Scheme, including ":", guaranteed.
|
||||
* 2. User/password, including "@", optional.
|
||||
* 3. Host, guaranteed.
|
||||
* 4. Port, including ":", optional.
|
||||
* 5. Path, including "/", optional.
|
||||
* 6. Query, including "?", optional.
|
||||
* 7. Hash, including "#", optional.
|
||||
*/
|
||||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
||||
/**
|
||||
 * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
|
||||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
||||
*
|
||||
* 1. Host, optional.
|
||||
* 2. Path, which may include "/", guaranteed.
|
||||
* 3. Query, including "?", optional.
|
||||
* 4. Hash, including "#", optional.
|
||||
*/
|
||||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
||||
function isAbsoluteUrl(input) {
|
||||
return schemeRegex.test(input);
|
||||
}
|
||||
function isSchemeRelativeUrl(input) {
|
||||
return input.startsWith('//');
|
||||
}
|
||||
function isAbsolutePath(input) {
|
||||
return input.startsWith('/');
|
||||
}
|
||||
function isFileUrl(input) {
|
||||
return input.startsWith('file:');
|
||||
}
|
||||
function isRelative(input) {
|
||||
return /^[.?#]/.test(input);
|
||||
}
|
||||
function parseAbsoluteUrl(input) {
|
||||
const match = urlRegex.exec(input);
|
||||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
||||
}
|
||||
function parseFileUrl(input) {
|
||||
const match = fileRegex.exec(input);
|
||||
const path = match[2];
|
||||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
||||
}
|
||||
function makeUrl(scheme, user, host, port, path, query, hash) {
|
||||
return {
|
||||
scheme,
|
||||
user,
|
||||
host,
|
||||
port,
|
||||
path,
|
||||
query,
|
||||
hash,
|
||||
type: 7 /* Absolute */,
|
||||
};
|
||||
}
|
||||
function parseUrl(input) {
|
||||
if (isSchemeRelativeUrl(input)) {
|
||||
const url = parseAbsoluteUrl('http:' + input);
|
||||
url.scheme = '';
|
||||
url.type = 6 /* SchemeRelative */;
|
||||
return url;
|
||||
}
|
||||
if (isAbsolutePath(input)) {
|
||||
const url = parseAbsoluteUrl('http://foo.com' + input);
|
||||
url.scheme = '';
|
||||
url.host = '';
|
||||
url.type = 5 /* AbsolutePath */;
|
||||
return url;
|
||||
}
|
||||
if (isFileUrl(input))
|
||||
return parseFileUrl(input);
|
||||
if (isAbsoluteUrl(input))
|
||||
return parseAbsoluteUrl(input);
|
||||
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
||||
url.scheme = '';
|
||||
url.host = '';
|
||||
url.type = input
|
||||
? input.startsWith('?')
|
||||
? 3 /* Query */
|
||||
: input.startsWith('#')
|
||||
? 2 /* Hash */
|
||||
: 4 /* RelativePath */
|
||||
: 1 /* Empty */;
|
||||
return url;
|
||||
}
|
||||
function stripPathFilename(path) {
|
||||
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
||||
// paths. It's not a file, so we can't strip it.
|
||||
if (path.endsWith('/..'))
|
||||
return path;
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
function mergePaths(url, base) {
|
||||
normalizePath(base, base.type);
|
||||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
||||
// path).
|
||||
if (url.path === '/') {
|
||||
url.path = base.path;
|
||||
}
|
||||
else {
|
||||
// Resolution happens relative to the base path's directory, not the file.
|
||||
url.path = stripPathFilename(base.path) + url.path;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
||||
* "foo/.". We need to normalize to a standard representation.
|
||||
*/
|
||||
function normalizePath(url, type) {
|
||||
const rel = type <= 4 /* RelativePath */;
|
||||
const pieces = url.path.split('/');
|
||||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
||||
// pieces[0] is an empty string.
|
||||
let pointer = 1;
|
||||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
||||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
||||
let positive = 0;
|
||||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
||||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
||||
// real directory, we won't need to append, unless the other conditions happen again.
|
||||
let addTrailingSlash = false;
|
||||
for (let i = 1; i < pieces.length; i++) {
|
||||
const piece = pieces[i];
|
||||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
||||
if (!piece) {
|
||||
addTrailingSlash = true;
|
||||
continue;
|
||||
}
|
||||
// If we encounter a real directory, then we don't need to append anymore.
|
||||
addTrailingSlash = false;
|
||||
// A current directory, which we can always drop.
|
||||
if (piece === '.')
|
||||
continue;
|
||||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
||||
// have an excess of parents, and we'll need to keep the "..".
|
||||
if (piece === '..') {
|
||||
if (positive) {
|
||||
addTrailingSlash = true;
|
||||
positive--;
|
||||
pointer--;
|
||||
}
|
||||
else if (rel) {
|
||||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
||||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
||||
pieces[pointer++] = piece;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
||||
// any popped or dropped directories.
|
||||
pieces[pointer++] = piece;
|
||||
positive++;
|
||||
}
|
||||
let path = '';
|
||||
for (let i = 1; i < pointer; i++) {
|
||||
path += '/' + pieces[i];
|
||||
}
|
||||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
||||
path += '/';
|
||||
}
|
||||
url.path = path;
|
||||
}
|
||||
/**
|
||||
* Attempts to resolve `input` URL/path relative to `base`.
|
||||
*/
|
||||
function resolve(input, base) {
|
||||
if (!input && !base)
|
||||
return '';
|
||||
const url = parseUrl(input);
|
||||
let inputType = url.type;
|
||||
if (base && inputType !== 7 /* Absolute */) {
|
||||
const baseUrl = parseUrl(base);
|
||||
const baseType = baseUrl.type;
|
||||
switch (inputType) {
|
||||
case 1 /* Empty */:
|
||||
url.hash = baseUrl.hash;
|
||||
// fall through
|
||||
case 2 /* Hash */:
|
||||
url.query = baseUrl.query;
|
||||
// fall through
|
||||
case 3 /* Query */:
|
||||
case 4 /* RelativePath */:
|
||||
mergePaths(url, baseUrl);
|
||||
// fall through
|
||||
case 5 /* AbsolutePath */:
|
||||
// The host, user, and port are joined, you can't copy one without the others.
|
||||
url.user = baseUrl.user;
|
||||
url.host = baseUrl.host;
|
||||
url.port = baseUrl.port;
|
||||
// fall through
|
||||
case 6 /* SchemeRelative */:
|
||||
// The input doesn't have a scheme at least, so we need to copy at least that over.
|
||||
url.scheme = baseUrl.scheme;
|
||||
}
|
||||
if (baseType > inputType)
|
||||
inputType = baseType;
|
||||
}
|
||||
normalizePath(url, inputType);
|
||||
const queryHash = url.query + url.hash;
|
||||
switch (inputType) {
|
||||
// This is impossible, because of the empty checks at the start of the function.
|
||||
// case UrlType.Empty:
|
||||
case 2 /* Hash */:
|
||||
case 3 /* Query */:
|
||||
return queryHash;
|
||||
case 4 /* RelativePath */: {
|
||||
// The first char is always a "/", and we need it to be relative.
|
||||
const path = url.path.slice(1);
|
||||
if (!path)
|
||||
return queryHash || '.';
|
||||
if (isRelative(base || input) && !isRelative(path)) {
|
||||
// If base started with a leading ".", or there is no base and input started with a ".",
|
||||
// then we need to ensure that the relative path starts with a ".". We don't know if
|
||||
// relative starts with a "..", though, so check before prepending.
|
||||
return './' + path + queryHash;
|
||||
}
|
||||
return path + queryHash;
|
||||
}
|
||||
case 5 /* AbsolutePath */:
|
||||
return url.path + queryHash;
|
||||
default:
|
||||
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
||||
}
|
||||
}
|
||||
|
||||
export { resolve as default };
|
||||
//# sourceMappingURL=resolve-uri.mjs.map
|
1
frontend/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
240
frontend/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js
generated
vendored
Normal file
|
@ -0,0 +1,240 @@
|
|||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
||||
typeof define === 'function' && define.amd ? define(factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory());
|
||||
})(this, (function () { 'use strict';
|
||||
|
||||
// Matches the scheme of a URL, eg "http://"
|
||||
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||
/**
|
||||
* Matches the parts of a URL:
|
||||
* 1. Scheme, including ":", guaranteed.
|
||||
* 2. User/password, including "@", optional.
|
||||
* 3. Host, guaranteed.
|
||||
* 4. Port, including ":", optional.
|
||||
* 5. Path, including "/", optional.
|
||||
* 6. Query, including "?", optional.
|
||||
* 7. Hash, including "#", optional.
|
||||
*/
|
||||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
||||
/**
|
||||
 * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
|
||||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
||||
*
|
||||
* 1. Host, optional.
|
||||
* 2. Path, which may include "/", guaranteed.
|
||||
* 3. Query, including "?", optional.
|
||||
* 4. Hash, including "#", optional.
|
||||
*/
|
||||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
||||
function isAbsoluteUrl(input) {
|
||||
return schemeRegex.test(input);
|
||||
}
|
||||
function isSchemeRelativeUrl(input) {
|
||||
return input.startsWith('//');
|
||||
}
|
||||
function isAbsolutePath(input) {
|
||||
return input.startsWith('/');
|
||||
}
|
||||
function isFileUrl(input) {
|
||||
return input.startsWith('file:');
|
||||
}
|
||||
function isRelative(input) {
|
||||
return /^[.?#]/.test(input);
|
||||
}
|
||||
function parseAbsoluteUrl(input) {
|
||||
const match = urlRegex.exec(input);
|
||||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
||||
}
|
||||
function parseFileUrl(input) {
|
||||
const match = fileRegex.exec(input);
|
||||
const path = match[2];
|
||||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
||||
}
|
||||
function makeUrl(scheme, user, host, port, path, query, hash) {
|
||||
return {
|
||||
scheme,
|
||||
user,
|
||||
host,
|
||||
port,
|
||||
path,
|
||||
query,
|
||||
hash,
|
||||
type: 7 /* Absolute */,
|
||||
};
|
||||
}
|
||||
function parseUrl(input) {
|
||||
if (isSchemeRelativeUrl(input)) {
|
||||
const url = parseAbsoluteUrl('http:' + input);
|
||||
url.scheme = '';
|
||||
url.type = 6 /* SchemeRelative */;
|
||||
return url;
|
||||
}
|
||||
if (isAbsolutePath(input)) {
|
||||
const url = parseAbsoluteUrl('http://foo.com' + input);
|
||||
url.scheme = '';
|
||||
url.host = '';
|
||||
url.type = 5 /* AbsolutePath */;
|
||||
return url;
|
||||
}
|
||||
if (isFileUrl(input))
|
||||
return parseFileUrl(input);
|
||||
if (isAbsoluteUrl(input))
|
||||
return parseAbsoluteUrl(input);
|
||||
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
||||
url.scheme = '';
|
||||
url.host = '';
|
||||
url.type = input
|
||||
? input.startsWith('?')
|
||||
? 3 /* Query */
|
||||
: input.startsWith('#')
|
||||
? 2 /* Hash */
|
||||
: 4 /* RelativePath */
|
||||
: 1 /* Empty */;
|
||||
return url;
|
||||
}
|
||||
function stripPathFilename(path) {
|
||||
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
||||
// paths. It's not a file, so we can't strip it.
|
||||
if (path.endsWith('/..'))
|
||||
return path;
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
function mergePaths(url, base) {
|
||||
normalizePath(base, base.type);
|
||||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
||||
// path).
|
||||
if (url.path === '/') {
|
||||
url.path = base.path;
|
||||
}
|
||||
else {
|
||||
// Resolution happens relative to the base path's directory, not the file.
|
||||
url.path = stripPathFilename(base.path) + url.path;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
||||
* "foo/.". We need to normalize to a standard representation.
|
||||
*/
|
||||
function normalizePath(url, type) {
|
||||
const rel = type <= 4 /* RelativePath */;
|
||||
const pieces = url.path.split('/');
|
||||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
||||
// pieces[0] is an empty string.
|
||||
let pointer = 1;
|
||||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
||||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
||||
let positive = 0;
|
||||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
||||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
||||
// real directory, we won't need to append, unless the other conditions happen again.
|
||||
let addTrailingSlash = false;
|
||||
for (let i = 1; i < pieces.length; i++) {
|
||||
const piece = pieces[i];
|
||||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
||||
if (!piece) {
|
||||
addTrailingSlash = true;
|
||||
continue;
|
||||
}
|
||||
// If we encounter a real directory, then we don't need to append anymore.
|
||||
addTrailingSlash = false;
|
||||
// A current directory, which we can always drop.
|
||||
if (piece === '.')
|
||||
continue;
|
||||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
||||
// have an excess of parents, and we'll need to keep the "..".
|
||||
if (piece === '..') {
|
||||
if (positive) {
|
||||
addTrailingSlash = true;
|
||||
positive--;
|
||||
pointer--;
|
||||
}
|
||||
else if (rel) {
|
||||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
||||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
||||
pieces[pointer++] = piece;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
||||
// any popped or dropped directories.
|
||||
pieces[pointer++] = piece;
|
||||
positive++;
|
||||
}
|
||||
let path = '';
|
||||
for (let i = 1; i < pointer; i++) {
|
||||
path += '/' + pieces[i];
|
||||
}
|
||||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
||||
path += '/';
|
||||
}
|
||||
url.path = path;
|
||||
}
|
||||
/**
|
||||
* Attempts to resolve `input` URL/path relative to `base`.
|
||||
*/
|
||||
function resolve(input, base) {
|
||||
if (!input && !base)
|
||||
return '';
|
||||
const url = parseUrl(input);
|
||||
let inputType = url.type;
|
||||
if (base && inputType !== 7 /* Absolute */) {
|
||||
const baseUrl = parseUrl(base);
|
||||
const baseType = baseUrl.type;
|
||||
switch (inputType) {
|
||||
case 1 /* Empty */:
|
||||
url.hash = baseUrl.hash;
|
||||
// fall through
|
||||
case 2 /* Hash */:
|
||||
url.query = baseUrl.query;
|
||||
// fall through
|
||||
case 3 /* Query */:
|
||||
case 4 /* RelativePath */:
|
||||
mergePaths(url, baseUrl);
|
||||
// fall through
|
||||
case 5 /* AbsolutePath */:
|
||||
// The host, user, and port are joined, you can't copy one without the others.
|
||||
url.user = baseUrl.user;
|
||||
url.host = baseUrl.host;
|
||||
url.port = baseUrl.port;
|
||||
// fall through
|
||||
case 6 /* SchemeRelative */:
|
||||
// The input doesn't have a scheme at least, so we need to copy at least that over.
|
||||
url.scheme = baseUrl.scheme;
|
||||
}
|
||||
if (baseType > inputType)
|
||||
inputType = baseType;
|
||||
}
|
||||
normalizePath(url, inputType);
|
||||
const queryHash = url.query + url.hash;
|
||||
switch (inputType) {
|
||||
// This is impossible, because of the empty checks at the start of the function.
|
||||
// case UrlType.Empty:
|
||||
case 2 /* Hash */:
|
||||
case 3 /* Query */:
|
||||
return queryHash;
|
||||
case 4 /* RelativePath */: {
|
||||
// The first char is always a "/", and we need it to be relative.
|
||||
const path = url.path.slice(1);
|
||||
if (!path)
|
||||
return queryHash || '.';
|
||||
if (isRelative(base || input) && !isRelative(path)) {
|
||||
// If base started with a leading ".", or there is no base and input started with a ".",
|
||||
// then we need to ensure that the relative path starts with a ".". We don't know if
|
||||
// relative starts with a "..", though, so check before prepending.
|
||||
return './' + path + queryHash;
|
||||
}
|
||||
return path + queryHash;
|
||||
}
|
||||
case 5 /* AbsolutePath */:
|
||||
return url.path + queryHash;
|
||||
default:
|
||||
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
||||
}
|
||||
}
|
||||
|
||||
return resolve;
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=resolve-uri.umd.js.map
|
1
frontend/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
frontend/node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 */
export default function resolve(input: string, base: string | undefined): string;
69
frontend/node_modules/@jridgewell/resolve-uri/package.json
generated
vendored
Normal file
|
@ -0,0 +1,69 @@
|
|||
{
|
||||
"name": "@jridgewell/resolve-uri",
|
||||
"version": "3.1.2",
|
||||
"description": "Resolve a URI relative to an optional base URI",
|
||||
"keywords": [
|
||||
"resolve",
|
||||
"uri",
|
||||
"url",
|
||||
"path"
|
||||
],
|
||||
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/jridgewell/resolve-uri",
|
||||
"main": "dist/resolve-uri.umd.js",
|
||||
"module": "dist/resolve-uri.mjs",
|
||||
"types": "dist/types/resolve-uri.d.ts",
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"types": "./dist/types/resolve-uri.d.ts",
|
||||
"browser": "./dist/resolve-uri.umd.js",
|
||||
"require": "./dist/resolve-uri.umd.js",
|
||||
"import": "./dist/resolve-uri.mjs"
|
||||
},
|
||||
"./dist/resolve-uri.umd.js"
|
||||
],
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"prebuild": "rm -rf dist",
|
||||
"build": "run-s -n build:*",
|
||||
"build:rollup": "rollup -c rollup.config.js",
|
||||
"build:ts": "tsc --project tsconfig.build.json",
|
||||
"lint": "run-s -n lint:*",
|
||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||
"pretest": "run-s build:rollup",
|
||||
"test": "run-s -n test:lint test:only",
|
||||
"test:debug": "mocha --inspect-brk",
|
||||
"test:lint": "run-s -n test:lint:*",
|
||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||
"test:only": "mocha",
|
||||
"test:coverage": "c8 mocha",
|
||||
"test:watch": "mocha --watch",
|
||||
"prepublishOnly": "npm run preversion",
|
||||
"preversion": "run-s test build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*",
|
||||
"@rollup/plugin-typescript": "8.3.0",
|
||||
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||
"@typescript-eslint/parser": "5.10.0",
|
||||
"c8": "7.11.0",
|
||||
"eslint": "8.7.0",
|
||||
"eslint-config-prettier": "8.3.0",
|
||||
"mocha": "9.2.0",
|
||||
"npm-run-all": "4.1.5",
|
||||
"prettier": "2.5.1",
|
||||
"rollup": "2.66.0",
|
||||
"typescript": "4.5.5"
|
||||
}
|
||||
}
|
19
frontend/node_modules/@jridgewell/set-array/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
37
frontend/node_modules/@jridgewell/set-array/README.md
generated
vendored
Normal file
@@ -0,0 +1,37 @@
# @jridgewell/set-array

> Like a Set, but provides the index of the `key` in the backing array

This is designed to allow synchronizing a second array with the contents of the backing array, like
how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, and there
are never duplicates.

## Installation

```sh
npm install @jridgewell/set-array
```

## Usage

```js
import { SetArray, get, put, pop } from '@jridgewell/set-array';

const sa = new SetArray();

let index = put(sa, 'first');
assert.strictEqual(index, 0);

index = put(sa, 'second');
assert.strictEqual(index, 1);

assert.deepEqual(sa.array, [ 'first', 'second' ]);

index = get(sa, 'first');
assert.strictEqual(index, 0);

pop(sa);
index = get(sa, 'second');
assert.strictEqual(index, undefined);
assert.deepEqual(sa.array, [ 'first' ]);
```
69
frontend/node_modules/@jridgewell/set-array/dist/set-array.mjs
generated
vendored
Normal file
@@ -0,0 +1,69 @@
/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
class SetArray {
    constructor() {
        this._indexes = { __proto__: null };
        this.array = [];
    }
}
/**
 * Typescript doesn't allow friend access to private fields, so this just casts the set into a type
 * with public access modifiers.
 */
function cast(set) {
    return set;
}
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
function get(setarr, key) {
    return cast(setarr)._indexes[key];
}
/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
function put(setarr, key) {
    // The key may or may not be present. If it is present, it's a number.
    const index = get(setarr, key);
    if (index !== undefined)
        return index;
    const { array, _indexes: indexes } = cast(setarr);
    const length = array.push(key);
    return (indexes[key] = length - 1);
}
/**
 * Pops the last added item out of the SetArray.
 */
function pop(setarr) {
    const { array, _indexes: indexes } = cast(setarr);
    if (array.length === 0)
        return;
    const last = array.pop();
    indexes[last] = undefined;
}
/**
 * Removes the key, if it exists in the set.
 */
function remove(setarr, key) {
    const index = get(setarr, key);
    if (index === undefined)
        return;
    const { array, _indexes: indexes } = cast(setarr);
    for (let i = index + 1; i < array.length; i++) {
        const k = array[i];
        array[i - 1] = k;
        indexes[k]--;
    }
    indexes[key] = undefined;
    array.pop();
}

export { SetArray, get, pop, put, remove };
//# sourceMappingURL=set-array.mjs.map
1
frontend/node_modules/@jridgewell/set-array/dist/set-array.mjs.map
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"set-array.mjs","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n 
array.pop();\n}\n"],"names":[],"mappings":"AAEA;;;;;;;;MAQa,QAAQ;IAInB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;KACjB;CACF;AAOD;;;;AAIA,SAAS,IAAI,CAAgB,GAAgB;IAC3C,OAAO,GAAU,CAAC;AACpB,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;IAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AACpC,CAAC;AAED;;;;SAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;IAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO,KAAK,CAAC;IAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;AACrC,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB;IACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO;IAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;IAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;AAC5B,CAAC;AAED;;;SAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;IAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO;IAEhC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;QACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;KACf;IACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;IACzB,KAAK,CAAC,GAAG,EAAE,CAAC;AACd;;;;"}
83
frontend/node_modules/@jridgewell/set-array/dist/set-array.umd.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
    typeof define === 'function' && define.amd ? define(['exports'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.setArray = {}));
})(this, (function (exports) { 'use strict';

    /**
     * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
     * index of the `key` in the backing array.
     *
     * This is designed to allow synchronizing a second array with the contents of the backing array,
     * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
     * and there are never duplicates.
     */
    class SetArray {
        constructor() {
            this._indexes = { __proto__: null };
            this.array = [];
        }
    }
    /**
     * Typescript doesn't allow friend access to private fields, so this just casts the set into a type
     * with public access modifiers.
     */
    function cast(set) {
        return set;
    }
    /**
     * Gets the index associated with `key` in the backing array, if it is already present.
     */
    function get(setarr, key) {
        return cast(setarr)._indexes[key];
    }
    /**
     * Puts `key` into the backing array, if it is not already present. Returns
     * the index of the `key` in the backing array.
     */
    function put(setarr, key) {
        // The key may or may not be present. If it is present, it's a number.
        const index = get(setarr, key);
        if (index !== undefined)
            return index;
        const { array, _indexes: indexes } = cast(setarr);
        const length = array.push(key);
        return (indexes[key] = length - 1);
    }
    /**
     * Pops the last added item out of the SetArray.
     */
    function pop(setarr) {
        const { array, _indexes: indexes } = cast(setarr);
        if (array.length === 0)
            return;
        const last = array.pop();
        indexes[last] = undefined;
    }
    /**
     * Removes the key, if it exists in the set.
     */
    function remove(setarr, key) {
        const index = get(setarr, key);
        if (index === undefined)
            return;
        const { array, _indexes: indexes } = cast(setarr);
        for (let i = index + 1; i < array.length; i++) {
            const k = array[i];
            array[i - 1] = k;
            indexes[k]--;
        }
        indexes[key] = undefined;
        array.pop();
    }

    exports.SetArray = SetArray;
    exports.get = get;
    exports.pop = pop;
    exports.put = put;
    exports.remove = remove;

    Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=set-array.umd.js.map
1
frontend/node_modules/@jridgewell/set-array/dist/set-array.umd.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"set-array.umd.js","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n 
array.pop();\n}\n"],"names":[],"mappings":";;;;;;IAEA;;;;;;;;UAQa,QAAQ;QAInB;YACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;YAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;SACjB;KACF;IAOD;;;;IAIA,SAAS,IAAI,CAAgB,GAAgB;QAC3C,OAAO,GAAU,CAAC;IACpB,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IACpC,CAAC;IAED;;;;aAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;QAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO,KAAK,CAAC;QAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;IACrC,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB;QACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;QAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;IAC5B,CAAC;IAED;;;aAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;QAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO;QAEhC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;YACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;YACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;SACf;QACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;QACzB,KAAK,CAAC,GAAG,EAAE,CAAC;IACd;;;;;;;;;;;;;;"}
32
frontend/node_modules/@jridgewell/set-array/dist/types/set-array.d.ts
generated
vendored
Normal file
@@ -0,0 +1,32 @@
declare type Key = string | number | symbol;
/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
export declare class SetArray<T extends Key = Key> {
    private _indexes;
    array: readonly T[];
    constructor();
}
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
/**
 * Pops the last added item out of the SetArray.
 */
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
/**
 * Removes the key, if it exists in the set.
 */
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
export {};
65
frontend/node_modules/@jridgewell/set-array/package.json
generated
vendored
Normal file
@@ -0,0 +1,65 @@
{
  "name": "@jridgewell/set-array",
  "version": "1.2.1",
  "description": "Like a Set, but provides the index of the `key` in the backing array",
  "keywords": [],
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "license": "MIT",
  "repository": "https://github.com/jridgewell/set-array",
  "main": "dist/set-array.umd.js",
  "module": "dist/set-array.mjs",
  "typings": "dist/types/set-array.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/set-array.d.ts",
        "browser": "./dist/set-array.umd.js",
        "require": "./dist/set-array.umd.js",
        "import": "./dist/set-array.mjs"
      },
      "./dist/set-array.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist"
  ],
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "prebuild": "rm -rf dist",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@types/mocha": "9.1.1",
    "@types/node": "17.0.29",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "c8": "7.11.0",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.66.0",
    "tsx": "4.7.1",
    "typescript": "4.5.5"
  }
}
21
frontend/node_modules/@jridgewell/sourcemap-codec/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
The MIT License

Copyright (c) 2015 Rich Harris

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
264
frontend/node_modules/@jridgewell/sourcemap-codec/README.md
generated
vendored
Normal file
@@ -0,0 +1,264 @@
# @jridgewell/sourcemap-codec

Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).


## Why?

Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap.

This package makes the process slightly easier.


## Installation

```bash
npm install @jridgewell/sourcemap-codec
```


## Usage

```js
import { encode, decode } from '@jridgewell/sourcemap-codec';

var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );

assert.deepEqual( decoded, [
    // the first line (of the generated code) has no mappings,
    // as shown by the starting semi-colon (which separates lines)
    [],

    // the second line contains four (comma-separated) segments
    [
        // segments are encoded as you'd expect:
        // [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]

        // i.e. the first segment begins at column 2, and maps back to the second column
        // of the second line (both zero-based) of the 0th source, and uses the 0th
        // name in the `map.names` array
        [ 2, 0, 2, 2, 0 ],

        // the remaining segments are 4-length rather than 5-length,
        // because they don't map a name
        [ 4, 0, 2, 4 ],
        [ 6, 0, 2, 5 ],
        [ 7, 0, 2, 7 ]
    ],

    // the final line contains two segments
    [
        [ 2, 1, 10, 19 ],
        [ 12, 1, 11, 20 ]
    ]
]);

var encoded = encode( decoded );
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
```
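
Because the decoded form is plain nested arrays of absolute, zero-based numbers, a mapping can be adjusted in place and re-encoded. The round-trip sketch below is illustrative only: it reuses the mappings string from the example above, and the one-column shift is an arbitrary edit chosen for demonstration, not something prescribed by the library.

```js
import { encode, decode } from '@jridgewell/sourcemap-codec';

// Decode into an array of lines, each an array of segments.
const decoded = decode(';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC');

// Shift every segment on the second generated line one column to the right.
for (const segment of decoded[1]) {
    segment[0] += 1;
}

// Re-encode back into a VLQ `mappings` string for the sourcemap.
const shifted = encode(decoded);
console.log(shifted);
```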

## Benchmarks

```
node v20.10.0

amp.js.map - 45120 segments

Decode Memory Usage:
local code 5815135 bytes
@jridgewell/sourcemap-codec 1.4.15 5868160 bytes
sourcemap-codec 5492584 bytes
source-map-0.6.1 13569984 bytes
source-map-0.8.0 6390584 bytes
chrome dev tools 8011136 bytes
Smallest memory usage is sourcemap-codec

Decode speed:
decode: local code x 492 ops/sec ±1.22% (90 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 499 ops/sec ±1.16% (89 runs sampled)
decode: sourcemap-codec x 376 ops/sec ±1.66% (89 runs sampled)
decode: source-map-0.6.1 x 34.99 ops/sec ±0.94% (48 runs sampled)
decode: source-map-0.8.0 x 351 ops/sec ±0.07% (95 runs sampled)
chrome dev tools x 165 ops/sec ±0.91% (86 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15

Encode Memory Usage:
local code 444248 bytes
@jridgewell/sourcemap-codec 1.4.15 623024 bytes
sourcemap-codec 8696280 bytes
source-map-0.6.1 8745176 bytes
source-map-0.8.0 8736624 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 796 ops/sec ±0.11% (97 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 795 ops/sec ±0.25% (98 runs sampled)
encode: sourcemap-codec x 231 ops/sec ±0.83% (86 runs sampled)
encode: source-map-0.6.1 x 166 ops/sec ±0.57% (86 runs sampled)
encode: source-map-0.8.0 x 203 ops/sec ±0.45% (88 runs sampled)
Fastest is encode: local code,encode: @jridgewell/sourcemap-codec 1.4.15


***


babel.min.js.map - 347793 segments

Decode Memory Usage:
local code 35424960 bytes
@jridgewell/sourcemap-codec 1.4.15 35424696 bytes
sourcemap-codec 36033464 bytes
source-map-0.6.1 62253704 bytes
source-map-0.8.0 43843920 bytes
chrome dev tools 45111400 bytes
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15

Decode speed:
decode: local code x 38.18 ops/sec ±5.44% (52 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 38.36 ops/sec ±5.02% (52 runs sampled)
decode: sourcemap-codec x 34.05 ops/sec ±4.45% (47 runs sampled)
decode: source-map-0.6.1 x 4.31 ops/sec ±2.76% (15 runs sampled)
decode: source-map-0.8.0 x 55.60 ops/sec ±0.13% (73 runs sampled)
chrome dev tools x 16.94 ops/sec ±3.78% (46 runs sampled)
Fastest is decode: source-map-0.8.0

Encode Memory Usage:
local code 2606016 bytes
@jridgewell/sourcemap-codec 1.4.15 2626440 bytes
sourcemap-codec 21152576 bytes
source-map-0.6.1 25023928 bytes
source-map-0.8.0 25256448 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 127 ops/sec ±0.18% (83 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 128 ops/sec ±0.26% (83 runs sampled)
encode: sourcemap-codec x 29.31 ops/sec ±2.55% (53 runs sampled)
encode: source-map-0.6.1 x 18.85 ops/sec ±3.19% (36 runs sampled)
encode: source-map-0.8.0 x 19.34 ops/sec ±1.97% (36 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15


***


preact.js.map - 1992 segments

Decode Memory Usage:
local code 261696 bytes
@jridgewell/sourcemap-codec 1.4.15 244296 bytes
sourcemap-codec 302816 bytes
source-map-0.6.1 939176 bytes
source-map-0.8.0 336 bytes
chrome dev tools 587368 bytes
Smallest memory usage is source-map-0.8.0

Decode speed:
decode: local code x 17,782 ops/sec ±0.32% (97 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 17,863 ops/sec ±0.40% (100 runs sampled)
decode: sourcemap-codec x 12,453 ops/sec ±0.27% (101 runs sampled)
decode: source-map-0.6.1 x 1,288 ops/sec ±1.05% (96 runs sampled)
decode: source-map-0.8.0 x 9,289 ops/sec ±0.27% (101 runs sampled)
chrome dev tools x 4,769 ops/sec ±0.18% (100 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15

Encode Memory Usage:
local code 262944 bytes
@jridgewell/sourcemap-codec 1.4.15 25544 bytes
sourcemap-codec 323048 bytes
source-map-0.6.1 507808 bytes
source-map-0.8.0 507480 bytes
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15

Encode speed:
encode: local code x 24,207 ops/sec ±0.79% (95 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 24,288 ops/sec ±0.48% (96 runs sampled)
encode: sourcemap-codec x 6,761 ops/sec ±0.21% (100 runs sampled)
encode: source-map-0.6.1 x 5,374 ops/sec ±0.17% (99 runs sampled)
encode: source-map-0.8.0 x 5,633 ops/sec ±0.32% (99 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15,encode: local code


***


react.js.map - 5726 segments

Decode Memory Usage:
local code 678816 bytes
@jridgewell/sourcemap-codec 1.4.15 678816 bytes
sourcemap-codec 816400 bytes
source-map-0.6.1 2288864 bytes
source-map-0.8.0 721360 bytes
chrome dev tools 1012512 bytes
Smallest memory usage is local code

Decode speed:
decode: local code x 6,178 ops/sec ±0.19% (98 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 6,261 ops/sec ±0.22% (100 runs sampled)
decode: sourcemap-codec x 4,472 ops/sec ±0.90% (99 runs sampled)
decode: source-map-0.6.1 x 449 ops/sec ±0.31% (95 runs sampled)
decode: source-map-0.8.0 x 3,219 ops/sec ±0.13% (100 runs sampled)
chrome dev tools x 1,743 ops/sec ±0.20% (99 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15

Encode Memory Usage:
local code 140960 bytes
@jridgewell/sourcemap-codec 1.4.15 159808 bytes
sourcemap-codec 969304 bytes
source-map-0.6.1 930520 bytes
source-map-0.8.0 930248 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 8,013 ops/sec ±0.19% (100 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 7,989 ops/sec ±0.20% (101 runs sampled)
encode: sourcemap-codec x 2,472 ops/sec ±0.21% (99 runs sampled)
encode: source-map-0.6.1 x 2,200 ops/sec ±0.17% (99 runs sampled)
encode: source-map-0.8.0 x 2,220 ops/sec ±0.37% (99 runs sampled)
Fastest is encode: local code


***


vscode.map - 2141001 segments

Decode Memory Usage:
local code 198955264 bytes
@jridgewell/sourcemap-codec 1.4.15 199175352 bytes
sourcemap-codec 199102688 bytes
source-map-0.6.1 386323432 bytes
source-map-0.8.0 244116432 bytes
chrome dev tools 293734280 bytes
Smallest memory usage is local code

Decode speed:
decode: local code x 3.90 ops/sec ±22.21% (15 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 3.95 ops/sec ±23.53% (15 runs sampled)
decode: sourcemap-codec x 3.82 ops/sec ±17.94% (14 runs sampled)
decode: source-map-0.6.1 x 0.61 ops/sec ±7.81% (6 runs sampled)
decode: source-map-0.8.0 x 9.54 ops/sec ±0.28% (28 runs sampled)
chrome dev tools x 2.18 ops/sec ±10.58% (10 runs sampled)
Fastest is decode: source-map-0.8.0

Encode Memory Usage:
local code 13509880 bytes
@jridgewell/sourcemap-codec 1.4.15 13537648 bytes
sourcemap-codec 32540104 bytes
source-map-0.6.1 127531040 bytes
source-map-0.8.0 127535312 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 20.10 ops/sec ±0.19% (38 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 20.26 ops/sec ±0.32% (38 runs sampled)
encode: sourcemap-codec x 5.44 ops/sec ±1.64% (18 runs sampled)
encode: source-map-0.6.1 x 2.30 ops/sec ±4.79% (10 runs sampled)
encode: source-map-0.8.0 x 2.46 ops/sec ±6.53% (10 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15
```

# License

MIT
424
frontend/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
Normal file
@@ -0,0 +1,424 @@
const comma = ','.charCodeAt(0);
|
||||
const semicolon = ';'.charCodeAt(0);
|
||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||
for (let i = 0; i < chars.length; i++) {
|
||||
const c = chars.charCodeAt(i);
|
||||
intToChar[i] = c;
|
||||
charToInt[c] = i;
|
||||
}
|
||||
function decodeInteger(reader, relative) {
|
||||
let value = 0;
|
||||
let shift = 0;
|
||||
let integer = 0;
|
||||
do {
|
||||
const c = reader.next();
|
||||
integer = charToInt[c];
|
||||
value |= (integer & 31) << shift;
|
||||
shift += 5;
|
||||
} while (integer & 32);
|
||||
const shouldNegate = value & 1;
|
||||
value >>>= 1;
|
||||
if (shouldNegate) {
|
||||
value = -0x80000000 | -value;
|
||||
}
|
||||
return relative + value;
|
||||
}
|
||||
function encodeInteger(builder, num, relative) {
|
||||
let delta = num - relative;
|
||||
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
|
||||
do {
|
||||
let clamped = delta & 0b011111;
|
||||
delta >>>= 5;
|
||||
if (delta > 0)
|
||||
clamped |= 0b100000;
|
||||
builder.write(intToChar[clamped]);
|
||||
} while (delta > 0);
|
||||
return num;
|
||||
}
|
||||
function hasMoreVlq(reader, max) {
|
||||
if (reader.pos >= max)
|
||||
return false;
|
||||
return reader.peek() !== comma;
|
||||
}
|
||||
|
||||
const bufLength = 1024 * 16;
|
||||
// Provide a fallback for older environments.
|
||||
const td = typeof TextDecoder !== 'undefined'
|
||||
? /* #__PURE__ */ new TextDecoder()
|
||||
: typeof Buffer !== 'undefined'
|
||||
? {
|
||||
decode(buf) {
|
||||
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||
return out.toString();
|
||||
},
|
||||
}
|
||||
: {
|
||||
decode(buf) {
|
||||
let out = '';
|
||||
for (let i = 0; i < buf.length; i++) {
|
||||
out += String.fromCharCode(buf[i]);
|
||||
}
|
||||
return out;
|
||||
},
|
||||
};
|
||||
class StringWriter {
|
||||
constructor() {
|
||||
this.pos = 0;
|
||||
this.out = '';
|
||||
this.buffer = new Uint8Array(bufLength);
|
||||
}
|
||||
write(v) {
|
||||
const { buffer } = this;
|
||||
buffer[this.pos++] = v;
|
||||
if (this.pos === bufLength) {
|
||||
this.out += td.decode(buffer);
|
||||
this.pos = 0;
|
||||
}
|
||||
}
|
||||
flush() {
|
||||
const { buffer, out, pos } = this;
|
||||
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
|
||||
}
|
||||
}
|
||||
class StringReader {
|
||||
constructor(buffer) {
|
||||
this.pos = 0;
|
||||
this.buffer = buffer;
|
||||
}
|
||||
next() {
|
||||
return this.buffer.charCodeAt(this.pos++);
|
||||
}
|
||||
peek() {
|
||||
return this.buffer.charCodeAt(this.pos);
|
||||
}
|
||||
indexOf(char) {
|
||||
const { buffer, pos } = this;
|
||||
const idx = buffer.indexOf(char, pos);
|
||||
return idx === -1 ? buffer.length : idx;
|
||||
}
|
||||
}
|
||||
|
||||
const EMPTY = [];
|
||||
function decodeOriginalScopes(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const scopes = [];
|
||||
const stack = [];
|
||||
let line = 0;
|
||||
for (; reader.pos < length; reader.pos++) {
|
||||
line = decodeInteger(reader, line);
|
||||
const column = decodeInteger(reader, 0);
|
||||
if (!hasMoreVlq(reader, length)) {
|
||||
const last = stack.pop();
|
||||
last[2] = line;
|
||||
last[3] = column;
|
||||
continue;
|
||||
}
|
||||
const kind = decodeInteger(reader, 0);
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasName = fields & 0b0001;
|
||||
const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
|
||||
let vars = EMPTY;
|
||||
if (hasMoreVlq(reader, length)) {
|
||||
vars = [];
|
||||
do {
|
||||
const varsIndex = decodeInteger(reader, 0);
|
||||
vars.push(varsIndex);
|
||||
} while (hasMoreVlq(reader, length));
|
||||
}
|
||||
scope.vars = vars;
|
||||
scopes.push(scope);
|
||||
stack.push(scope);
|
||||
}
|
||||
return scopes;
|
||||
}
|
||||
function encodeOriginalScopes(scopes) {
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < scopes.length;) {
|
||||
i = _encodeOriginalScopes(scopes, i, writer, [0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeOriginalScopes(scopes, index, writer, state) {
|
||||
const scope = scopes[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
|
||||
if (index > 0)
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, startLine, state[0]);
|
||||
encodeInteger(writer, startColumn, 0);
|
||||
encodeInteger(writer, kind, 0);
|
||||
const fields = scope.length === 6 ? 0b0001 : 0;
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (scope.length === 6)
|
||||
encodeInteger(writer, scope[5], 0);
|
||||
for (const v of vars) {
|
||||
encodeInteger(writer, v, 0);
|
||||
}
|
||||
for (index++; index < scopes.length;) {
|
||||
const next = scopes[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeOriginalScopes(scopes, index, writer, state);
|
||||
}
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, endLine, state[0]);
|
||||
encodeInteger(writer, endColumn, 0);
|
||||
return index;
|
||||
}
|
||||
function decodeGeneratedRanges(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const ranges = [];
|
||||
const stack = [];
|
||||
let genLine = 0;
|
||||
let definitionSourcesIndex = 0;
|
||||
let definitionScopeIndex = 0;
|
||||
let callsiteSourcesIndex = 0;
|
||||
let callsiteLine = 0;
|
||||
let callsiteColumn = 0;
|
||||
let bindingLine = 0;
|
||||
let bindingColumn = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
let genColumn = 0;
|
||||
for (; reader.pos < semi; reader.pos++) {
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (!hasMoreVlq(reader, semi)) {
|
||||
const last = stack.pop();
|
||||
last[2] = genLine;
|
||||
last[3] = genColumn;
|
||||
continue;
|
||||
}
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasDefinition = fields & 0b0001;
|
||||
const hasCallsite = fields & 0b0010;
|
||||
const hasScope = fields & 0b0100;
|
||||
let callsite = null;
|
||||
let bindings = EMPTY;
|
||||
let range;
|
||||
if (hasDefinition) {
|
||||
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
|
||||
definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
|
||||
definitionSourcesIndex = defSourcesIndex;
|
||||
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
|
||||
}
|
||||
else {
|
||||
range = [genLine, genColumn, 0, 0];
|
||||
}
|
||||
range.isScope = !!hasScope;
|
||||
if (hasCallsite) {
|
||||
const prevCsi = callsiteSourcesIndex;
|
||||
const prevLine = callsiteLine;
|
||||
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
|
||||
const sameSource = prevCsi === callsiteSourcesIndex;
|
||||
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
|
||||
callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
|
||||
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
|
||||
}
|
||||
range.callsite = callsite;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
bindings = [];
|
||||
do {
|
||||
bindingLine = genLine;
|
||||
bindingColumn = genColumn;
|
||||
const expressionsCount = decodeInteger(reader, 0);
|
||||
let expressionRanges;
|
||||
if (expressionsCount < -1) {
|
||||
expressionRanges = [[decodeInteger(reader, 0)]];
|
||||
for (let i = -1; i > expressionsCount; i--) {
|
||||
const prevBl = bindingLine;
|
||||
bindingLine = decodeInteger(reader, bindingLine);
|
||||
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
|
||||
const expression = decodeInteger(reader, 0);
|
||||
expressionRanges.push([expression, bindingLine, bindingColumn]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
expressionRanges = [[expressionsCount]];
|
||||
}
|
||||
bindings.push(expressionRanges);
|
||||
} while (hasMoreVlq(reader, semi));
|
||||
}
|
||||
range.bindings = bindings;
|
||||
ranges.push(range);
|
||||
stack.push(range);
|
||||
}
|
||||
genLine++;
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos < length);
|
||||
return ranges;
|
||||
}
|
||||
function encodeGeneratedRanges(ranges) {
|
||||
if (ranges.length === 0)
|
||||
return '';
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < ranges.length;) {
|
||||
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeGeneratedRanges(ranges, index, writer, state) {
|
||||
const range = ranges[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
|
||||
if (state[0] < startLine) {
|
||||
catchupLine(writer, state[0], startLine);
|
||||
state[0] = startLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else if (index > 0) {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, range[1], state[1]);
|
||||
const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (range.length === 6) {
|
||||
const { 4: sourcesIndex, 5: scopesIndex } = range;
|
||||
if (sourcesIndex !== state[2]) {
|
||||
state[3] = 0;
|
||||
}
|
||||
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
|
||||
state[3] = encodeInteger(writer, scopesIndex, state[3]);
|
||||
}
|
||||
if (callsite) {
|
||||
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
|
||||
if (sourcesIndex !== state[4]) {
|
||||
state[5] = 0;
|
||||
state[6] = 0;
|
||||
}
|
||||
else if (callLine !== state[5]) {
|
||||
state[6] = 0;
|
||||
}
|
||||
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
|
||||
state[5] = encodeInteger(writer, callLine, state[5]);
|
||||
state[6] = encodeInteger(writer, callColumn, state[6]);
|
||||
}
|
||||
if (bindings) {
|
||||
for (const binding of bindings) {
|
||||
if (binding.length > 1)
|
||||
encodeInteger(writer, -binding.length, 0);
|
||||
const expression = binding[0][0];
|
||||
encodeInteger(writer, expression, 0);
|
||||
let bindingStartLine = startLine;
|
||||
let bindingStartColumn = startColumn;
|
||||
for (let i = 1; i < binding.length; i++) {
|
||||
const expRange = binding[i];
|
||||
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
|
||||
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
|
||||
encodeInteger(writer, expRange[0], 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (index++; index < ranges.length;) {
|
||||
const next = ranges[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeGeneratedRanges(ranges, index, writer, state);
|
||||
}
|
||||
if (state[0] < endLine) {
|
||||
catchupLine(writer, state[0], endLine);
|
||||
state[0] = endLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, endColumn, state[1]);
|
||||
return index;
|
||||
}
|
||||
function catchupLine(writer, lastLine, line) {
|
||||
do {
|
||||
writer.write(semicolon);
|
||||
} while (++lastLine < line);
|
||||
}
|
||||
|
||||
function decode(mappings) {
|
||||
const { length } = mappings;
|
||||
const reader = new StringReader(mappings);
|
||||
const decoded = [];
|
||||
let genColumn = 0;
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
const line = [];
|
||||
let sorted = true;
|
||||
let lastCol = 0;
|
||||
genColumn = 0;
|
||||
while (reader.pos < semi) {
|
||||
let seg;
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (genColumn < lastCol)
|
||||
sorted = false;
|
||||
lastCol = genColumn;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
sourcesIndex = decodeInteger(reader, sourcesIndex);
|
||||
sourceLine = decodeInteger(reader, sourceLine);
|
||||
sourceColumn = decodeInteger(reader, sourceColumn);
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
namesIndex = decodeInteger(reader, namesIndex);
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
|
||||
}
|
||||
else {
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
|
||||
}
|
||||
}
|
||||
else {
|
||||
seg = [genColumn];
|
||||
}
|
||||
line.push(seg);
|
||||
reader.pos++;
|
||||
}
|
||||
if (!sorted)
|
||||
sort(line);
|
||||
decoded.push(line);
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos <= length);
|
||||
return decoded;
|
||||
}
|
||||
function sort(line) {
|
||||
line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[0] - b[0];
|
||||
}
|
||||
function encode(decoded) {
|
||||
const writer = new StringWriter();
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
if (i > 0)
|
||||
writer.write(semicolon);
|
||||
if (line.length === 0)
|
||||
continue;
|
||||
let genColumn = 0;
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const segment = line[j];
|
||||
if (j > 0)
|
||||
writer.write(comma);
|
||||
genColumn = encodeInteger(writer, segment[0], genColumn);
|
||||
if (segment.length === 1)
|
||||
continue;
|
||||
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
|
||||
sourceLine = encodeInteger(writer, segment[2], sourceLine);
|
||||
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
|
||||
if (segment.length === 4)
|
||||
continue;
|
||||
namesIndex = encodeInteger(writer, segment[4], namesIndex);
|
||||
}
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
|
||||
export { decode, decodeGeneratedRanges, decodeOriginalScopes, encode, encodeGeneratedRanges, encodeOriginalScopes };
|
||||
//# sourceMappingURL=sourcemap-codec.mjs.map
|
1
frontend/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
439
frontend/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
Normal file
@@ -0,0 +1,439 @@
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
|
||||
})(this, (function (exports) { 'use strict';
|
||||
|
||||
const comma = ','.charCodeAt(0);
|
||||
const semicolon = ';'.charCodeAt(0);
|
||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||
for (let i = 0; i < chars.length; i++) {
|
||||
const c = chars.charCodeAt(i);
|
||||
intToChar[i] = c;
|
||||
charToInt[c] = i;
|
||||
}
|
||||
function decodeInteger(reader, relative) {
|
||||
let value = 0;
|
||||
let shift = 0;
|
||||
let integer = 0;
|
||||
do {
|
||||
const c = reader.next();
|
||||
integer = charToInt[c];
|
||||
value |= (integer & 31) << shift;
|
||||
shift += 5;
|
||||
} while (integer & 32);
|
||||
const shouldNegate = value & 1;
|
||||
value >>>= 1;
|
||||
if (shouldNegate) {
|
||||
value = -0x80000000 | -value;
|
||||
}
|
||||
return relative + value;
|
||||
}
|
||||
function encodeInteger(builder, num, relative) {
|
||||
let delta = num - relative;
|
||||
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
|
||||
do {
|
||||
let clamped = delta & 0b011111;
|
||||
delta >>>= 5;
|
||||
if (delta > 0)
|
||||
clamped |= 0b100000;
|
||||
builder.write(intToChar[clamped]);
|
||||
} while (delta > 0);
|
||||
return num;
|
||||
}
|
||||
function hasMoreVlq(reader, max) {
|
||||
if (reader.pos >= max)
|
||||
return false;
|
||||
return reader.peek() !== comma;
|
||||
}
|
||||
|
||||
const bufLength = 1024 * 16;
|
||||
// Provide a fallback for older environments.
|
||||
const td = typeof TextDecoder !== 'undefined'
|
||||
? /* #__PURE__ */ new TextDecoder()
|
||||
: typeof Buffer !== 'undefined'
|
||||
? {
|
||||
decode(buf) {
|
||||
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||
return out.toString();
|
||||
},
|
||||
}
|
||||
: {
|
||||
decode(buf) {
|
||||
let out = '';
|
||||
for (let i = 0; i < buf.length; i++) {
|
||||
out += String.fromCharCode(buf[i]);
|
||||
}
|
||||
return out;
|
||||
},
|
||||
};
|
||||
class StringWriter {
|
||||
constructor() {
|
||||
this.pos = 0;
|
||||
this.out = '';
|
||||
this.buffer = new Uint8Array(bufLength);
|
||||
}
|
||||
write(v) {
|
||||
const { buffer } = this;
|
||||
buffer[this.pos++] = v;
|
||||
if (this.pos === bufLength) {
|
||||
this.out += td.decode(buffer);
|
||||
this.pos = 0;
|
||||
}
|
||||
}
|
||||
flush() {
|
||||
const { buffer, out, pos } = this;
|
||||
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
|
||||
}
|
||||
}
|
||||
class StringReader {
|
||||
constructor(buffer) {
|
||||
this.pos = 0;
|
||||
this.buffer = buffer;
|
||||
}
|
||||
next() {
|
||||
return this.buffer.charCodeAt(this.pos++);
|
||||
}
|
||||
peek() {
|
||||
return this.buffer.charCodeAt(this.pos);
|
||||
}
|
||||
indexOf(char) {
|
||||
const { buffer, pos } = this;
|
||||
const idx = buffer.indexOf(char, pos);
|
||||
return idx === -1 ? buffer.length : idx;
|
||||
}
|
||||
}
|
||||
|
||||
const EMPTY = [];
|
||||
function decodeOriginalScopes(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const scopes = [];
|
||||
const stack = [];
|
||||
let line = 0;
|
||||
for (; reader.pos < length; reader.pos++) {
|
||||
line = decodeInteger(reader, line);
|
||||
const column = decodeInteger(reader, 0);
|
||||
if (!hasMoreVlq(reader, length)) {
|
||||
const last = stack.pop();
|
||||
last[2] = line;
|
||||
last[3] = column;
|
||||
continue;
|
||||
}
|
||||
const kind = decodeInteger(reader, 0);
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasName = fields & 0b0001;
|
||||
const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
|
||||
let vars = EMPTY;
|
||||
if (hasMoreVlq(reader, length)) {
|
||||
vars = [];
|
||||
do {
|
||||
const varsIndex = decodeInteger(reader, 0);
|
||||
vars.push(varsIndex);
|
||||
} while (hasMoreVlq(reader, length));
|
||||
}
|
||||
scope.vars = vars;
|
||||
scopes.push(scope);
|
||||
stack.push(scope);
|
||||
}
|
||||
return scopes;
|
||||
}
|
||||
function encodeOriginalScopes(scopes) {
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < scopes.length;) {
|
||||
i = _encodeOriginalScopes(scopes, i, writer, [0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeOriginalScopes(scopes, index, writer, state) {
|
||||
const scope = scopes[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
|
||||
if (index > 0)
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, startLine, state[0]);
|
||||
encodeInteger(writer, startColumn, 0);
|
||||
encodeInteger(writer, kind, 0);
|
||||
const fields = scope.length === 6 ? 0b0001 : 0;
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (scope.length === 6)
|
||||
encodeInteger(writer, scope[5], 0);
|
||||
for (const v of vars) {
|
||||
encodeInteger(writer, v, 0);
|
||||
}
|
||||
for (index++; index < scopes.length;) {
|
||||
const next = scopes[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeOriginalScopes(scopes, index, writer, state);
|
||||
}
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, endLine, state[0]);
|
||||
encodeInteger(writer, endColumn, 0);
|
||||
return index;
|
||||
}
|
||||
function decodeGeneratedRanges(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const ranges = [];
|
||||
const stack = [];
|
||||
let genLine = 0;
|
||||
let definitionSourcesIndex = 0;
|
||||
let definitionScopeIndex = 0;
|
||||
let callsiteSourcesIndex = 0;
|
||||
let callsiteLine = 0;
|
||||
let callsiteColumn = 0;
|
||||
let bindingLine = 0;
|
||||
let bindingColumn = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
let genColumn = 0;
|
||||
for (; reader.pos < semi; reader.pos++) {
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (!hasMoreVlq(reader, semi)) {
|
||||
const last = stack.pop();
|
||||
last[2] = genLine;
|
||||
last[3] = genColumn;
|
||||
continue;
|
||||
}
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasDefinition = fields & 0b0001;
|
||||
const hasCallsite = fields & 0b0010;
|
||||
const hasScope = fields & 0b0100;
|
||||
let callsite = null;
|
||||
let bindings = EMPTY;
|
||||
let range;
|
||||
if (hasDefinition) {
|
||||
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
|
||||
definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
|
||||
definitionSourcesIndex = defSourcesIndex;
|
||||
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
|
||||
}
|
||||
else {
|
||||
range = [genLine, genColumn, 0, 0];
|
||||
}
|
||||
range.isScope = !!hasScope;
|
||||
if (hasCallsite) {
|
||||
const prevCsi = callsiteSourcesIndex;
|
||||
const prevLine = callsiteLine;
|
||||
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
|
||||
const sameSource = prevCsi === callsiteSourcesIndex;
|
||||
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
|
||||
callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
|
||||
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
|
||||
}
|
||||
range.callsite = callsite;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
bindings = [];
|
||||
do {
|
||||
bindingLine = genLine;
|
||||
bindingColumn = genColumn;
|
||||
const expressionsCount = decodeInteger(reader, 0);
|
||||
let expressionRanges;
|
||||
if (expressionsCount < -1) {
|
||||
expressionRanges = [[decodeInteger(reader, 0)]];
|
||||
for (let i = -1; i > expressionsCount; i--) {
|
||||
const prevBl = bindingLine;
|
||||
bindingLine = decodeInteger(reader, bindingLine);
|
||||
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
|
||||
const expression = decodeInteger(reader, 0);
|
||||
expressionRanges.push([expression, bindingLine, bindingColumn]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
expressionRanges = [[expressionsCount]];
|
||||
}
|
||||
bindings.push(expressionRanges);
|
||||
} while (hasMoreVlq(reader, semi));
|
||||
}
|
||||
range.bindings = bindings;
|
||||
ranges.push(range);
|
||||
stack.push(range);
|
||||
}
|
||||
genLine++;
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos < length);
|
||||
return ranges;
|
||||
}
|
||||
function encodeGeneratedRanges(ranges) {
|
||||
if (ranges.length === 0)
|
||||
return '';
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < ranges.length;) {
|
||||
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeGeneratedRanges(ranges, index, writer, state) {
|
||||
const range = ranges[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
|
||||
if (state[0] < startLine) {
|
||||
catchupLine(writer, state[0], startLine);
|
||||
state[0] = startLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else if (index > 0) {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, range[1], state[1]);
|
||||
const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (range.length === 6) {
|
||||
const { 4: sourcesIndex, 5: scopesIndex } = range;
|
||||
if (sourcesIndex !== state[2]) {
|
||||
state[3] = 0;
|
||||
}
|
||||
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
|
||||
state[3] = encodeInteger(writer, scopesIndex, state[3]);
|
||||
}
|
||||
if (callsite) {
|
||||
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
|
||||
if (sourcesIndex !== state[4]) {
|
||||
state[5] = 0;
|
||||
state[6] = 0;
|
||||
}
|
||||
else if (callLine !== state[5]) {
|
||||
state[6] = 0;
|
||||
}
|
||||
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
|
||||
state[5] = encodeInteger(writer, callLine, state[5]);
|
||||
state[6] = encodeInteger(writer, callColumn, state[6]);
|
||||
}
|
||||
if (bindings) {
|
||||
for (const binding of bindings) {
|
||||
if (binding.length > 1)
|
||||
encodeInteger(writer, -binding.length, 0);
|
||||
const expression = binding[0][0];
|
||||
encodeInteger(writer, expression, 0);
|
||||
let bindingStartLine = startLine;
|
||||
let bindingStartColumn = startColumn;
|
||||
for (let i = 1; i < binding.length; i++) {
|
||||
const expRange = binding[i];
|
||||
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
|
||||
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
|
||||
encodeInteger(writer, expRange[0], 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (index++; index < ranges.length;) {
|
||||
const next = ranges[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeGeneratedRanges(ranges, index, writer, state);
|
||||
}
|
||||
if (state[0] < endLine) {
|
||||
catchupLine(writer, state[0], endLine);
|
||||
state[0] = endLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, endColumn, state[1]);
|
||||
return index;
|
||||
}
|
||||
function catchupLine(writer, lastLine, line) {
|
||||
do {
|
||||
writer.write(semicolon);
|
||||
} while (++lastLine < line);
|
||||
}
|
||||
|
||||
function decode(mappings) {
|
||||
const { length } = mappings;
|
||||
const reader = new StringReader(mappings);
|
||||
const decoded = [];
|
||||
let genColumn = 0;
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
const line = [];
|
||||
let sorted = true;
|
||||
let lastCol = 0;
|
||||
genColumn = 0;
|
||||
while (reader.pos < semi) {
|
||||
let seg;
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (genColumn < lastCol)
|
||||
sorted = false;
|
||||
lastCol = genColumn;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
sourcesIndex = decodeInteger(reader, sourcesIndex);
|
||||
sourceLine = decodeInteger(reader, sourceLine);
|
||||
sourceColumn = decodeInteger(reader, sourceColumn);
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
namesIndex = decodeInteger(reader, namesIndex);
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
|
||||
}
|
||||
else {
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
|
||||
}
|
||||
}
|
||||
else {
|
||||
seg = [genColumn];
|
||||
}
|
||||
line.push(seg);
|
||||
reader.pos++;
|
||||
}
|
||||
if (!sorted)
|
||||
sort(line);
|
||||
decoded.push(line);
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos <= length);
|
||||
return decoded;
|
||||
}
|
||||
function sort(line) {
|
||||
line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[0] - b[0];
|
||||
}
|
||||
function encode(decoded) {
|
||||
const writer = new StringWriter();
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
if (i > 0)
|
||||
writer.write(semicolon);
|
||||
if (line.length === 0)
|
||||
continue;
|
||||
let genColumn = 0;
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const segment = line[j];
|
||||
if (j > 0)
|
||||
writer.write(comma);
|
||||
genColumn = encodeInteger(writer, segment[0], genColumn);
|
||||
if (segment.length === 1)
|
||||
continue;
|
||||
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
|
||||
sourceLine = encodeInteger(writer, segment[2], sourceLine);
|
||||
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
|
||||
if (segment.length === 4)
|
||||
continue;
|
||||
namesIndex = encodeInteger(writer, segment[4], namesIndex);
|
||||
}
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
|
||||
exports.decode = decode;
|
||||
exports.decodeGeneratedRanges = decodeGeneratedRanges;
|
||||
exports.decodeOriginalScopes = decodeOriginalScopes;
|
||||
exports.encode = encode;
|
||||
exports.encodeGeneratedRanges = encodeGeneratedRanges;
|
||||
exports.encodeOriginalScopes = encodeOriginalScopes;
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=sourcemap-codec.umd.js.map
|
1
frontend/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
49  frontend/node_modules/@jridgewell/sourcemap-codec/dist/types/scopes.d.ts  generated vendored Normal file
@@ -0,0 +1,49 @@
declare type Line = number;
declare type Column = number;
declare type Kind = number;
declare type Name = number;
declare type Var = number;
declare type SourcesIndex = number;
declare type ScopesIndex = number;
declare type Mix<A, B, O> = (A & O) | (B & O);
export declare type OriginalScope = Mix<[
    Line,
    Column,
    Line,
    Column,
    Kind
], [
    Line,
    Column,
    Line,
    Column,
    Kind,
    Name
], {
    vars: Var[];
}>;
export declare type GeneratedRange = Mix<[
    Line,
    Column,
    Line,
    Column
], [
    Line,
    Column,
    Line,
    Column,
    SourcesIndex,
    ScopesIndex
], {
    callsite: CallSite | null;
    bindings: Binding[];
    isScope: boolean;
}>;
export declare type CallSite = [SourcesIndex, Line, Column];
declare type Binding = BindingExpressionRange[];
export declare type BindingExpressionRange = [Name] | [Name, Line, Column];
export declare function decodeOriginalScopes(input: string): OriginalScope[];
export declare function encodeOriginalScopes(scopes: OriginalScope[]): string;
export declare function decodeGeneratedRanges(input: string): GeneratedRange[];
export declare function encodeGeneratedRanges(ranges: GeneratedRange[]): string;
export {};
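The `Mix<A, B, O>` helper above describes values that are simultaneously a tuple (one of two arities) and an object carrying extra fields. A small sketch of building an `OriginalScope` by hand, with made-up positions, just to show the shape; real values come from `decodeOriginalScopes`:

```typescript
import type { OriginalScope } from '@jridgewell/sourcemap-codec';

// Tuple part: [Line, Column, Line, Column, Kind] (presumably the start and end
// positions plus a kind index); object part: { vars }. The numbers are illustrative.
const scope: OriginalScope = Object.assign(
  [0, 0, 10, 5, 1] as [number, number, number, number, number],
  { vars: [0, 1] },
);

console.log(scope[2], scope.vars); // 10 [ 0, 1 ]
```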
8  frontend/node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts  generated vendored Normal file
@@ -0,0 +1,8 @@
export { decodeOriginalScopes, encodeOriginalScopes, decodeGeneratedRanges, encodeGeneratedRanges, } from './scopes';
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes';
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export declare type SourceMapLine = SourceMapSegment[];
export declare type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
15  frontend/node_modules/@jridgewell/sourcemap-codec/dist/types/strings.d.ts  generated vendored Normal file
@@ -0,0 +1,15 @@
export declare class StringWriter {
    pos: number;
    private out;
    private buffer;
    write(v: number): void;
    flush(): string;
}
export declare class StringReader {
    pos: number;
    private buffer;
    constructor(buffer: string);
    next(): number;
    peek(): number;
    indexOf(char: string): number;
}
6  frontend/node_modules/@jridgewell/sourcemap-codec/dist/types/vlq.d.ts  generated vendored Normal file
@@ -0,0 +1,6 @@
import type { StringReader, StringWriter } from './strings';
export declare const comma: number;
export declare const semicolon: number;
export declare function decodeInteger(reader: StringReader, relative: number): number;
export declare function encodeInteger(builder: StringWriter, num: number, relative: number): number;
export declare function hasMoreVlq(reader: StringReader, max: number): boolean;
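These are the internal VLQ helpers: judging by the call sites in `encode` above, `encodeInteger(builder, num, relative)` writes the delta from `relative` and returns `num`, which is how the running state variables are kept current. The module is not part of the package's public entry point, so the sketch below is a standalone, illustrative base64 VLQ encoder for a single integer rather than the library's own code:

```typescript
const CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';

// Illustrative only: encode one (already delta-adjusted) integer as base64 VLQ.
function encodeVlq(num: number): string {
  // The sign lives in the lowest bit; the remaining bits are the magnitude.
  let vlq = num < 0 ? (-num << 1) | 1 : num << 1;
  let out = '';
  do {
    let digit = vlq & 0b11111; // five payload bits per base64 digit
    vlq >>>= 5;
    if (vlq > 0) digit |= 0b100000; // continuation bit: more digits follow
    out += CHARS[digit];
  } while (vlq > 0);
  return out;
}

console.log(encodeVlq(16)); // 'gB' — needs a continuation digit
console.log(encodeVlq(-1)); // 'D'
```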
75  frontend/node_modules/@jridgewell/sourcemap-codec/package.json  generated vendored Normal file
@@ -0,0 +1,75 @@
{
  "name": "@jridgewell/sourcemap-codec",
  "version": "1.5.0",
  "description": "Encode/decode sourcemap mappings",
  "keywords": [
    "sourcemap",
    "vlq"
  ],
  "main": "dist/sourcemap-codec.umd.js",
  "module": "dist/sourcemap-codec.mjs",
  "types": "dist/types/sourcemap-codec.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/sourcemap-codec.d.ts",
        "browser": "./dist/sourcemap-codec.umd.js",
        "require": "./dist/sourcemap-codec.umd.js",
        "import": "./dist/sourcemap-codec.mjs"
      },
      "./dist/sourcemap-codec.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.js",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/sourcemap-codec.git"
  },
  "author": "Rich Harris",
  "license": "MIT",
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@types/mocha": "10.0.6",
    "@types/node": "17.0.15",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "benchmark": "2.1.4",
    "c8": "7.11.2",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.64.0",
    "source-map": "0.6.1",
    "source-map-js": "1.0.2",
    "sourcemap-codec": "1.4.8",
    "tsx": "4.7.1",
    "typescript": "4.5.4"
  }
}
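The `exports` map above gives Node's resolver per-condition entry points: `import` resolves to the ESM build, `require` (and `browser`) to the UMD build, and the bare string at the end of the array appears to act as a fallback for resolvers that do not understand condition objects. A quick sketch of observing both resolutions from one ESM file; the file name `example.mts` is made up:

```typescript
// example.mts — assumes @jridgewell/sourcemap-codec is installed locally.
import { decode } from '@jridgewell/sourcemap-codec'; // "import" -> dist/sourcemap-codec.mjs
import { createRequire } from 'node:module';

// createRequire lets an ESM file exercise the "require" condition,
// which resolves to dist/sourcemap-codec.umd.js instead.
const require = createRequire(import.meta.url);
const umd = require('@jridgewell/sourcemap-codec');

console.log(typeof decode, typeof umd.decode); // 'function' 'function'
```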
19  frontend/node_modules/@jridgewell/trace-mapping/LICENSE  generated vendored Normal file
@@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
257  frontend/node_modules/@jridgewell/trace-mapping/README.md  generated vendored Normal file
@@ -0,0 +1,257 @@
# @jridgewell/trace-mapping

> Trace the original position through a source map

`trace-mapping` allows you to take the line and column of an output file and trace it to the
original location in the source file through a source map.

You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.

## Installation

```sh
npm install @jridgewell/trace-mapping
```

## Usage

```typescript
import {
  TraceMap,
  originalPositionFor,
  generatedPositionFor,
  sourceContentFor,
  isIgnored,
} from '@jridgewell/trace-mapping';

const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  sourcesContent: ['content of input.js'],
  names: ['foo'],
  mappings: 'KAyCIA',
  ignoreList: [],
});

// Lines start at line 1, columns at column 0.
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
assert.deepEqual(traced, {
  source: 'input.js',
  line: 42,
  column: 4,
  name: 'foo',
});

const content = sourceContentFor(tracer, traced.source);
assert.strictEqual(content, 'content for input.js');

const generated = generatedPositionFor(tracer, {
  source: 'input.js',
  line: 42,
  column: 4,
});
assert.deepEqual(generated, {
  line: 1,
  column: 5,
});

const ignored = isIgnored(tracer, 'input.js');
assert.equal(ignored, false);
```

We also provide a lower level API to get the actual segment that matches our line and column. Unlike
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:

```typescript
import { traceSegment } from '@jridgewell/trace-mapping';

// line is 0-base.
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);

// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
// Again, line is 0-base and so is sourceLine
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
```

### SectionedSourceMaps

The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
`TraceMap` instance:

```typescript
import { AnyMap } from '@jridgewell/trace-mapping';
const fooOutput = 'foo';
const barOutput = 'bar';
const output = [fooOutput, barOutput].join('\n');

const sectioned = new AnyMap({
  version: 3,
  sections: [
    {
      // 0-base line and column
      offset: { line: 0, column: 0 },
      // fooOutput's sourcemap
      map: {
        version: 3,
        sources: ['foo.js'],
        names: ['foo'],
        mappings: 'AAAAA',
      },
    },
    {
      // barOutput's sourcemap will not affect the first line, only the second
      offset: { line: 1, column: 0 },
      map: {
        version: 3,
        sources: ['bar.js'],
        names: ['bar'],
        mappings: 'AAAAA',
      },
    },
  ],
});

const traced = originalPositionFor(sectioned, {
  line: 2,
  column: 0,
});

assert.deepEqual(traced, {
  source: 'bar.js',
  line: 1,
  column: 0,
  name: 'bar',
});
```

## Benchmarks

```
node v18.0.0

amp.js.map - 45120 segments

Memory Usage:
trace-mapping decoded 562400 bytes
trace-mapping encoded 5706544 bytes
source-map-js 10717664 bytes
source-map-0.6.1 17446384 bytes
source-map-0.8.0 9701757 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


babel.min.js.map - 347793 segments

Memory Usage:
trace-mapping decoded 89832 bytes
trace-mapping encoded 35474640 bytes
source-map-js 51257176 bytes
source-map-0.6.1 63515664 bytes
source-map-0.8.0 42933752 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


preact.js.map - 1992 segments

Memory Usage:
trace-mapping decoded 37128 bytes
trace-mapping encoded 247280 bytes
source-map-js 1143536 bytes
source-map-0.6.1 1290992 bytes
source-map-0.8.0 96544 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


react.js.map - 5726 segments

Memory Usage:
trace-mapping decoded 16176 bytes
trace-mapping encoded 681552 bytes
source-map-js 2418352 bytes
source-map-0.6.1 2443672 bytes
source-map-0.8.0 111768 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
```

[source-map]: https://www.npmjs.com/package/source-map
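Beyond what the README shows, the build below also exports `eachMapping` and `allGeneratedPositionsFor`. A short sketch of walking every mapping with `eachMapping`, reusing a tracer like the one from the Usage section; the callback fields match the object literal constructed in the implementation further down:

```typescript
import { TraceMap, eachMapping } from '@jridgewell/trace-mapping';

const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  names: ['foo'],
  mappings: 'KAyCIA',
});

// generatedLine/originalLine are 1-based, the columns are 0-based, and
// source/name are null for segments that don't carry those fields.
eachMapping(tracer, (m) => {
  console.log(m.generatedLine, m.generatedColumn, m.source, m.originalLine, m.originalColumn, m.name);
});
```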
580  frontend/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs  generated vendored Normal file
@@ -0,0 +1,580 @@
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||
import resolveUri from '@jridgewell/resolve-uri';
|
||||
|
||||
function resolve(input, base) {
|
||||
// The base is always treated as a directory, if it's not empty.
|
||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||
if (base && !base.endsWith('/'))
|
||||
base += '/';
|
||||
return resolveUri(input, base);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes everything after the last "/", but leaves the slash.
|
||||
*/
|
||||
function stripFilename(path) {
|
||||
if (!path)
|
||||
return '';
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
const REV_GENERATED_LINE = 1;
|
||||
const REV_GENERATED_COLUMN = 2;
|
||||
|
||||
function maybeSort(mappings, owned) {
|
||||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||
if (unsortedIndex === mappings.length)
|
||||
return mappings;
|
||||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||
// not, we do not want to modify the consumer's input array.
|
||||
if (!owned)
|
||||
mappings = mappings.slice();
|
||||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||
mappings[i] = sortSegments(mappings[i], owned);
|
||||
}
|
||||
return mappings;
|
||||
}
|
||||
function nextUnsortedSegmentLine(mappings, start) {
|
||||
for (let i = start; i < mappings.length; i++) {
|
||||
if (!isSorted(mappings[i]))
|
||||
return i;
|
||||
}
|
||||
return mappings.length;
|
||||
}
|
||||
function isSorted(line) {
|
||||
for (let j = 1; j < line.length; j++) {
|
||||
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function sortSegments(line, owned) {
|
||||
if (!owned)
|
||||
line = line.slice();
|
||||
return line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[COLUMN] - b[COLUMN];
|
||||
}
|
||||
|
||||
let found = false;
|
||||
/**
|
||||
* A binary search implementation that returns the index if a match is found.
|
||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||
* the next index:
|
||||
*
|
||||
* ```js
|
||||
* const array = [1, 3];
|
||||
* const needle = 2;
|
||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||
*
|
||||
* assert.equal(index, 0);
|
||||
* array.splice(index + 1, 0, needle);
|
||||
* assert.deepEqual(array, [1, 2, 3]);
|
||||
* ```
|
||||
*/
|
||||
function binarySearch(haystack, needle, low, high) {
|
||||
while (low <= high) {
|
||||
const mid = low + ((high - low) >> 1);
|
||||
const cmp = haystack[mid][COLUMN] - needle;
|
||||
if (cmp === 0) {
|
||||
found = true;
|
||||
return mid;
|
||||
}
|
||||
if (cmp < 0) {
|
||||
low = mid + 1;
|
||||
}
|
||||
else {
|
||||
high = mid - 1;
|
||||
}
|
||||
}
|
||||
found = false;
|
||||
return low - 1;
|
||||
}
|
||||
function upperBound(haystack, needle, index) {
|
||||
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function lowerBound(haystack, needle, index) {
|
||||
for (let i = index - 1; i >= 0; index = i--) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function memoizedState() {
|
||||
return {
|
||||
lastKey: -1,
|
||||
lastNeedle: -1,
|
||||
lastIndex: -1,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||
*/
|
||||
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||
const { lastKey, lastNeedle, lastIndex } = state;
|
||||
let low = 0;
|
||||
let high = haystack.length - 1;
|
||||
if (key === lastKey) {
|
||||
if (needle === lastNeedle) {
|
||||
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||
return lastIndex;
|
||||
}
|
||||
if (needle >= lastNeedle) {
|
||||
// lastIndex may be -1 if the previous needle was not found.
|
||||
low = lastIndex === -1 ? 0 : lastIndex;
|
||||
}
|
||||
else {
|
||||
high = lastIndex;
|
||||
}
|
||||
}
|
||||
state.lastKey = key;
|
||||
state.lastNeedle = needle;
|
||||
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||
}
|
||||
|
||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||
// of generated line/column.
|
||||
function buildBySources(decoded, memos) {
|
||||
const sources = memos.map(buildNullArray);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
if (seg.length === 1)
|
||||
continue;
|
||||
const sourceIndex = seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
const originalSource = sources[sourceIndex];
|
||||
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||
const memo = memos[sourceIndex];
|
||||
// The binary search either found a match, or it found the left-index just before where the
|
||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||
// generated segments associated with an original location, so there may need to move several
|
||||
// indexes before we find where we need to insert.
|
||||
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||
memo.lastIndex = ++index;
|
||||
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||
}
|
||||
}
|
||||
return sources;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||
// order when iterating with for-in.
|
||||
function buildNullArray() {
|
||||
return { __proto__: null };
|
||||
}
|
||||
|
||||
const AnyMap = function (map, mapUrl) {
|
||||
const parsed = parse(map);
|
||||
if (!('sections' in parsed)) {
|
||||
return new TraceMap(parsed, mapUrl);
|
||||
}
|
||||
const mappings = [];
|
||||
const sources = [];
|
||||
const sourcesContent = [];
|
||||
const names = [];
|
||||
const ignoreList = [];
|
||||
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||
const joined = {
|
||||
version: 3,
|
||||
file: parsed.file,
|
||||
names,
|
||||
sources,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
ignoreList,
|
||||
};
|
||||
return presortedDecodedMap(joined);
|
||||
};
|
||||
function parse(map) {
|
||||
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||
}
|
||||
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||
const { sections } = input;
|
||||
for (let i = 0; i < sections.length; i++) {
|
||||
const { map, offset } = sections[i];
|
||||
let sl = stopLine;
|
||||
let sc = stopColumn;
|
||||
if (i + 1 < sections.length) {
|
||||
const nextOffset = sections[i + 1].offset;
|
||||
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||
if (sl === stopLine) {
|
||||
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||
}
|
||||
else if (sl < stopLine) {
|
||||
sc = columnOffset + nextOffset.column;
|
||||
}
|
||||
}
|
||||
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||
}
|
||||
}
|
||||
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||
const parsed = parse(input);
|
||||
if ('sections' in parsed)
|
||||
return recurse(...arguments);
|
||||
const map = new TraceMap(parsed, mapUrl);
|
||||
const sourcesOffset = sources.length;
|
||||
const namesOffset = names.length;
|
||||
const decoded = decodedMappings(map);
|
||||
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||
append(sources, resolvedSources);
|
||||
append(names, map.names);
|
||||
if (contents)
|
||||
append(sourcesContent, contents);
|
||||
else
|
||||
for (let i = 0; i < resolvedSources.length; i++)
|
||||
sourcesContent.push(null);
|
||||
if (ignores)
|
||||
for (let i = 0; i < ignores.length; i++)
|
||||
ignoreList.push(ignores[i] + sourcesOffset);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const lineI = lineOffset + i;
|
||||
// We can only add so many lines before we step into the range that the next section's map
|
||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||
// still need to check that we don't overstep lines, too.
|
||||
if (lineI > stopLine)
|
||||
return;
|
||||
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||
const out = getLine(mappings, lineI);
|
||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||
// map can be multiple lines), it doesn't.
|
||||
const cOffset = i === 0 ? columnOffset : 0;
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const column = cOffset + seg[COLUMN];
|
||||
// If this segment steps into the column range that the next section's map controls, we need
|
||||
// to stop early.
|
||||
if (lineI === stopLine && column >= stopColumn)
|
||||
return;
|
||||
if (seg.length === 1) {
|
||||
out.push([column]);
|
||||
continue;
|
||||
}
|
||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
out.push(seg.length === 4
|
||||
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||
}
|
||||
}
|
||||
}
|
||||
function append(arr, other) {
|
||||
for (let i = 0; i < other.length; i++)
|
||||
arr.push(other[i]);
|
||||
}
|
||||
function getLine(arr, index) {
|
||||
for (let i = arr.length; i <= index; i++)
|
||||
arr[i] = [];
|
||||
return arr[index];
|
||||
}
|
||||
|
||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||
const LEAST_UPPER_BOUND = -1;
|
||||
const GREATEST_LOWER_BOUND = 1;
|
||||
class TraceMap {
|
||||
constructor(map, mapUrl) {
|
||||
const isString = typeof map === 'string';
|
||||
if (!isString && map._decodedMemo)
|
||||
return map;
|
||||
const parsed = (isString ? JSON.parse(map) : map);
|
||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||
this.version = version;
|
||||
this.file = file;
|
||||
this.names = names || [];
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sources = sources;
|
||||
this.sourcesContent = sourcesContent;
|
||||
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||
const { mappings } = parsed;
|
||||
if (typeof mappings === 'string') {
|
||||
this._encoded = mappings;
|
||||
this._decoded = undefined;
|
||||
}
|
||||
else {
|
||||
this._encoded = undefined;
|
||||
this._decoded = maybeSort(mappings, isString);
|
||||
}
|
||||
this._decodedMemo = memoizedState();
|
||||
this._bySources = undefined;
|
||||
this._bySourceMemos = undefined;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||
* with public access modifiers.
|
||||
*/
|
||||
function cast(map) {
|
||||
return map;
|
||||
}
|
||||
/**
|
||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||
*/
|
||||
function encodedMappings(map) {
|
||||
var _a;
|
||||
var _b;
|
||||
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
|
||||
}
|
||||
/**
|
||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||
*/
|
||||
function decodedMappings(map) {
|
||||
var _a;
|
||||
return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
|
||||
}
|
||||
/**
|
||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||
*/
|
||||
function traceSegment(map, line, column) {
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return null;
|
||||
const segments = decoded[line];
|
||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||
return index === -1 ? null : segments[index];
|
||||
}
|
||||
/**
|
||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||
* `source-map` library.
|
||||
*/
|
||||
function originalPositionFor(map, needle) {
|
||||
let { line, column, bias } = needle;
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return OMapping(null, null, null, null);
|
||||
const segments = decoded[line];
|
||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (index === -1)
|
||||
return OMapping(null, null, null, null);
|
||||
const segment = segments[index];
|
||||
if (segment.length === 1)
|
||||
return OMapping(null, null, null, null);
|
||||
const { names, resolvedSources } = map;
|
||||
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||
}
|
||||
/**
|
||||
* Finds the generated line/column position of the provided source/line/column source position.
|
||||
*/
|
||||
function generatedPositionFor(map, needle) {
|
||||
const { source, line, column, bias } = needle;
|
||||
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||
}
|
||||
/**
|
||||
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||
*/
|
||||
function allGeneratedPositionsFor(map, needle) {
|
||||
const { source, line, column, bias } = needle;
|
||||
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||
}
|
||||
/**
|
||||
* Iterates each mapping in generated position order.
|
||||
*/
|
||||
function eachMapping(map, cb) {
|
||||
const decoded = decodedMappings(map);
|
||||
const { names, resolvedSources } = map;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generatedLine = i + 1;
|
||||
const generatedColumn = seg[0];
|
||||
let source = null;
|
||||
let originalLine = null;
|
||||
let originalColumn = null;
|
||||
let name = null;
|
||||
if (seg.length !== 1) {
|
||||
source = resolvedSources[seg[1]];
|
||||
originalLine = seg[2] + 1;
|
||||
originalColumn = seg[3];
|
||||
}
|
||||
if (seg.length === 5)
|
||||
name = names[seg[4]];
|
||||
cb({
|
||||
generatedLine,
|
||||
generatedColumn,
|
||||
source,
|
||||
originalLine,
|
||||
originalColumn,
|
||||
name,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
function sourceIndex(map, source) {
|
||||
const { sources, resolvedSources } = map;
|
||||
let index = sources.indexOf(source);
|
||||
if (index === -1)
|
||||
index = resolvedSources.indexOf(source);
|
||||
return index;
|
||||
}
|
||||
/**
|
||||
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||
*/
|
||||
function sourceContentFor(map, source) {
|
||||
const { sourcesContent } = map;
|
||||
if (sourcesContent == null)
|
||||
return null;
|
||||
const index = sourceIndex(map, source);
|
||||
return index === -1 ? null : sourcesContent[index];
|
||||
}
|
||||
/**
|
||||
* Determines if the source is marked to ignore by the source map.
|
||||
*/
|
||||
function isIgnored(map, source) {
|
||||
const { ignoreList } = map;
|
||||
if (ignoreList == null)
|
||||
return false;
|
||||
const index = sourceIndex(map, source);
|
||||
return index === -1 ? false : ignoreList.includes(index);
|
||||
}
|
||||
/**
|
||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||
* maps.
|
||||
*/
|
||||
function presortedDecodedMap(map, mapUrl) {
|
||||
const tracer = new TraceMap(clone(map, []), mapUrl);
|
||||
cast(tracer)._decoded = map.mappings;
|
||||
return tracer;
|
||||
}
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
function decodedMap(map) {
|
||||
return clone(map, decodedMappings(map));
|
||||
}
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
function encodedMap(map) {
|
||||
return clone(map, encodedMappings(map));
|
||||
}
|
||||
function clone(map, mappings) {
|
||||
return {
|
||||
version: map.version,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings,
|
||||
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
||||
};
|
||||
}
|
||||
function OMapping(source, line, column, name) {
|
||||
return { source, line, column, name };
|
||||
}
|
||||
function GMapping(line, column) {
|
||||
return { line, column };
|
||||
}
|
||||
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||
if (found) {
|
||||
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||
}
|
||||
else if (bias === LEAST_UPPER_BOUND)
|
||||
index++;
|
||||
if (index === -1 || index === segments.length)
|
||||
return -1;
|
||||
return index;
|
||||
}
|
||||
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
||||
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
||||
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
||||
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
||||
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
||||
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
||||
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
||||
// match LEAST_UPPER_BOUND.
|
||||
if (!found && bias === LEAST_UPPER_BOUND)
|
||||
min++;
|
||||
if (min === -1 || min === segments.length)
|
||||
return [];
|
||||
// We may have found the segment that started at an earlier column. If this is the case, then we
|
||||
// need to slice all generated segments that match _that_ column, because all such segments span
|
||||
// to our desired column.
|
||||
const matchedColumn = found ? column : segments[min][COLUMN];
|
||||
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
||||
if (!found)
|
||||
min = lowerBound(segments, matchedColumn, min);
|
||||
const max = upperBound(segments, matchedColumn, min);
|
||||
const result = [];
|
||||
for (; min <= max; min++) {
|
||||
const segment = segments[min];
|
||||
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function generatedPosition(map, source, line, column, bias, all) {
|
||||
var _a;
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const { sources, resolvedSources } = map;
|
||||
let sourceIndex = sources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
sourceIndex = resolvedSources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
return all ? [] : GMapping(null, null);
|
||||
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
||||
const segments = generated[sourceIndex][line];
|
||||
if (segments == null)
|
||||
return all ? [] : GMapping(null, null);
|
||||
const memo = cast(map)._bySourceMemos[sourceIndex];
|
||||
if (all)
|
||||
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
||||
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
||||
if (index === -1)
|
||||
return GMapping(null, null);
|
||||
const segment = segments[index];
|
||||
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
||||
}
|
||||
|
||||
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
|
||||
//# sourceMappingURL=trace-mapping.mjs.map
|
1  frontend/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map  generated vendored Normal file
File diff suppressed because one or more lines are too long
600  frontend/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js  generated vendored Normal file
@@ -0,0 +1,600 @@
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
|
||||
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
||||
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
||||
|
||||
function resolve(input, base) {
|
||||
// The base is always treated as a directory, if it's not empty.
|
||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||
if (base && !base.endsWith('/'))
|
||||
base += '/';
|
||||
return resolveUri(input, base);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes everything after the last "/", but leaves the slash.
|
||||
*/
|
||||
function stripFilename(path) {
|
||||
if (!path)
|
||||
return '';
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
const REV_GENERATED_LINE = 1;
|
||||
const REV_GENERATED_COLUMN = 2;
|
||||
|
||||
function maybeSort(mappings, owned) {
|
||||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||
if (unsortedIndex === mappings.length)
|
||||
return mappings;
|
||||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||
// not, we do not want to modify the consumer's input array.
|
||||
if (!owned)
|
||||
mappings = mappings.slice();
|
||||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||
mappings[i] = sortSegments(mappings[i], owned);
|
||||
}
|
||||
return mappings;
|
||||
}
|
||||
function nextUnsortedSegmentLine(mappings, start) {
|
||||
for (let i = start; i < mappings.length; i++) {
|
||||
if (!isSorted(mappings[i]))
|
||||
return i;
|
||||
}
|
||||
return mappings.length;
|
||||
}
|
||||
function isSorted(line) {
|
||||
for (let j = 1; j < line.length; j++) {
|
||||
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function sortSegments(line, owned) {
|
||||
if (!owned)
|
||||
line = line.slice();
|
||||
return line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[COLUMN] - b[COLUMN];
|
||||
}
|
||||
|
||||
let found = false;
|
||||
/**
|
||||
* A binary search implementation that returns the index if a match is found.
|
||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||
* the next index:
|
||||
*
|
||||
* ```js
|
||||
* const array = [1, 3];
|
||||
* const needle = 2;
|
||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||
*
|
||||
* assert.equal(index, 0);
|
||||
* array.splice(index + 1, 0, needle);
|
||||
* assert.deepEqual(array, [1, 2, 3]);
|
||||
* ```
|
||||
*/
|
||||
function binarySearch(haystack, needle, low, high) {
|
||||
while (low <= high) {
|
||||
const mid = low + ((high - low) >> 1);
|
||||
const cmp = haystack[mid][COLUMN] - needle;
|
||||
if (cmp === 0) {
|
||||
found = true;
|
||||
return mid;
|
||||
}
|
||||
if (cmp < 0) {
|
||||
low = mid + 1;
|
||||
}
|
||||
else {
|
||||
high = mid - 1;
|
||||
}
|
||||
}
|
||||
found = false;
|
||||
return low - 1;
|
||||
}
|
||||
function upperBound(haystack, needle, index) {
|
||||
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function lowerBound(haystack, needle, index) {
|
||||
for (let i = index - 1; i >= 0; index = i--) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function memoizedState() {
|
||||
return {
|
||||
lastKey: -1,
|
||||
lastNeedle: -1,
|
||||
lastIndex: -1,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||
*/
|
||||
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||
const { lastKey, lastNeedle, lastIndex } = state;
|
||||
let low = 0;
|
||||
let high = haystack.length - 1;
|
||||
if (key === lastKey) {
|
||||
if (needle === lastNeedle) {
|
||||
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||
return lastIndex;
|
||||
}
|
||||
if (needle >= lastNeedle) {
|
||||
// lastIndex may be -1 if the previous needle was not found.
|
||||
low = lastIndex === -1 ? 0 : lastIndex;
|
||||
}
|
||||
else {
|
||||
high = lastIndex;
|
||||
}
|
||||
}
|
||||
state.lastKey = key;
|
||||
state.lastNeedle = needle;
|
||||
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||
}
|
||||
|
||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||
// of generated line/column.
|
||||
function buildBySources(decoded, memos) {
|
||||
const sources = memos.map(buildNullArray);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
if (seg.length === 1)
|
||||
continue;
|
||||
const sourceIndex = seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
const originalSource = sources[sourceIndex];
|
||||
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||
const memo = memos[sourceIndex];
|
||||
// The binary search either found a match, or it found the left-index just before where the
|
||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||
// generated segments associated with an original location, so there may need to move several
|
||||
// indexes before we find where we need to insert.
|
||||
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||
memo.lastIndex = ++index;
|
||||
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||
}
|
||||
}
|
||||
return sources;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||
// order when iterating with for-in.
|
||||
function buildNullArray() {
|
||||
return { __proto__: null };
|
||||
}
|
||||
|
||||
const AnyMap = function (map, mapUrl) {
|
||||
const parsed = parse(map);
|
||||
if (!('sections' in parsed)) {
|
||||
return new TraceMap(parsed, mapUrl);
|
||||
}
|
||||
const mappings = [];
|
||||
const sources = [];
|
||||
const sourcesContent = [];
|
||||
const names = [];
|
||||
const ignoreList = [];
|
||||
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||
const joined = {
|
||||
version: 3,
|
||||
file: parsed.file,
|
||||
names,
|
||||
sources,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
ignoreList,
|
||||
};
|
||||
return presortedDecodedMap(joined);
|
||||
};
|
||||
function parse(map) {
|
||||
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||
}
|
||||
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||
const { sections } = input;
|
||||
for (let i = 0; i < sections.length; i++) {
|
||||
const { map, offset } = sections[i];
|
||||
let sl = stopLine;
|
||||
let sc = stopColumn;
|
||||
if (i + 1 < sections.length) {
|
||||
const nextOffset = sections[i + 1].offset;
|
||||
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||
if (sl === stopLine) {
|
||||
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||
}
|
||||
else if (sl < stopLine) {
|
||||
sc = columnOffset + nextOffset.column;
|
||||
}
|
||||
}
|
||||
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||
}
|
||||
}
|
||||
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||
const parsed = parse(input);
|
||||
if ('sections' in parsed)
|
||||
return recurse(...arguments);
|
||||
const map = new TraceMap(parsed, mapUrl);
|
||||
const sourcesOffset = sources.length;
|
||||
const namesOffset = names.length;
|
||||
const decoded = decodedMappings(map);
|
||||
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||
append(sources, resolvedSources);
|
||||
append(names, map.names);
|
||||
if (contents)
|
||||
append(sourcesContent, contents);
|
||||
else
|
||||
for (let i = 0; i < resolvedSources.length; i++)
|
||||
sourcesContent.push(null);
|
||||
if (ignores)
|
||||
for (let i = 0; i < ignores.length; i++)
|
||||
ignoreList.push(ignores[i] + sourcesOffset);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const lineI = lineOffset + i;
|
||||
// We can only add so many lines before we step into the range that the next section's map
|
||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||
// still need to check that we don't overstep lines, too.
|
||||
if (lineI > stopLine)
|
||||
return;
|
||||
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||
const out = getLine(mappings, lineI);
|
||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||
// map can be multiple lines), it doesn't.
|
||||
const cOffset = i === 0 ? columnOffset : 0;
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const column = cOffset + seg[COLUMN];
|
||||
// If this segment steps into the column range that the next section's map controls, we need
|
||||
// to stop early.
|
||||
if (lineI === stopLine && column >= stopColumn)
|
||||
return;
|
||||
if (seg.length === 1) {
|
||||
out.push([column]);
|
||||
continue;
|
||||
}
|
||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
out.push(seg.length === 4
|
||||
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||
}
|
||||
}
|
||||
}
|
||||
function append(arr, other) {
|
||||
for (let i = 0; i < other.length; i++)
|
||||
arr.push(other[i]);
|
||||
}
|
||||
function getLine(arr, index) {
|
||||
for (let i = arr.length; i <= index; i++)
|
||||
arr[i] = [];
|
||||
return arr[index];
|
||||
}
|
||||
|
||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
class TraceMap {
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        if (!isString && map._decodedMemo)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names || [];
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
    }
}
/**
 * Typescript doesn't allow friend access to private fields, so this just casts the map into a type
 * with public access modifiers.
 */
function cast(map) {
    return map;
}
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
function encodedMappings(map) {
    var _a;
    var _b;
    return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = sourcemapCodec.encode(cast(map)._decoded)));
}
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
function decodedMappings(map) {
    var _a;
    return ((_a = cast(map))._decoded || (_a._decoded = sourcemapCodec.decode(cast(map)._encoded)));
}
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
function traceSegment(map, line, column) {
    const decoded = decodedMappings(map);
    // It's common for parent source maps to have pointers to lines that have no
    // mapping (like a "//# sourceMappingURL=") at the end of the child file.
    if (line >= decoded.length)
        return null;
    const segments = decoded[line];
    const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
    return index === -1 ? null : segments[index];
}
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
function originalPositionFor(map, needle) {
    let { line, column, bias } = needle;
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const decoded = decodedMappings(map);
    // It's common for parent source maps to have pointers to lines that have no
    // mapping (like a "//# sourceMappingURL=") at the end of the child file.
    if (line >= decoded.length)
        return OMapping(null, null, null, null);
    const segments = decoded[line];
    const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
    if (index === -1)
        return OMapping(null, null, null, null);
    const segment = segments[index];
    if (segment.length === 1)
        return OMapping(null, null, null, null);
    const { names, resolvedSources } = map;
    return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
}
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
function generatedPositionFor(map, needle) {
    const { source, line, column, bias } = needle;
    return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
}
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
function allGeneratedPositionsFor(map, needle) {
    const { source, line, column, bias } = needle;
    // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
    return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
}
/**
 * Iterates each mapping in generated position order.
 */
function eachMapping(map, cb) {
    const decoded = decodedMappings(map);
    const { names, resolvedSources } = map;
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const generatedLine = i + 1;
            const generatedColumn = seg[0];
            let source = null;
            let originalLine = null;
            let originalColumn = null;
            let name = null;
            if (seg.length !== 1) {
                source = resolvedSources[seg[1]];
                originalLine = seg[2] + 1;
                originalColumn = seg[3];
            }
            if (seg.length === 5)
                name = names[seg[4]];
            cb({
                generatedLine,
                generatedColumn,
                source,
                originalLine,
                originalColumn,
                name,
            });
        }
    }
}
function sourceIndex(map, source) {
    const { sources, resolvedSources } = map;
    let index = sources.indexOf(source);
    if (index === -1)
        index = resolvedSources.indexOf(source);
    return index;
}
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
 */
function sourceContentFor(map, source) {
    const { sourcesContent } = map;
    if (sourcesContent == null)
        return null;
    const index = sourceIndex(map, source);
    return index === -1 ? null : sourcesContent[index];
}
/**
 * Determines if the source is marked to ignore by the source map.
 */
function isIgnored(map, source) {
    const { ignoreList } = map;
    if (ignoreList == null)
        return false;
    const index = sourceIndex(map, source);
    return index === -1 ? false : ignoreList.includes(index);
}
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
function presortedDecodedMap(map, mapUrl) {
    const tracer = new TraceMap(clone(map, []), mapUrl);
    cast(tracer)._decoded = map.mappings;
    return tracer;
}
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function decodedMap(map) {
    return clone(map, decodedMappings(map));
}
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function encodedMap(map) {
    return clone(map, encodedMappings(map));
}
function clone(map, mappings) {
    return {
        version: map.version,
        file: map.file,
        names: map.names,
        sourceRoot: map.sourceRoot,
        sources: map.sources,
        sourcesContent: map.sourcesContent,
        mappings,
        ignoreList: map.ignoreList || map.x_google_ignoreList,
    };
}
function OMapping(source, line, column, name) {
    return { source, line, column, name };
}
function GMapping(line, column) {
    return { line, column };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        index++;
    if (index === -1 || index === segments.length)
        return -1;
    return index;
}
function sliceGeneratedPositions(segments, memo, line, column, bias) {
    let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
    // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
    // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
    // still need to call `lowerBound()` to find the first segment, which is slower than just looking
    // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
    // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
    // match LEAST_UPPER_BOUND.
    if (!found && bias === LEAST_UPPER_BOUND)
        min++;
    if (min === -1 || min === segments.length)
        return [];
    // We may have found the segment that started at an earlier column. If this is the case, then we
    // need to slice all generated segments that match _that_ column, because all such segments span
    // to our desired column.
    const matchedColumn = found ? column : segments[min][COLUMN];
    // The binary search is not guaranteed to find the lower bound when a match wasn't found.
    if (!found)
        min = lowerBound(segments, matchedColumn, min);
    const max = upperBound(segments, matchedColumn, min);
    const result = [];
    for (; min <= max; min++) {
        const segment = segments[min];
        result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
    }
    return result;
}
function generatedPosition(map, source, line, column, bias, all) {
    var _a;
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const { sources, resolvedSources } = map;
    let sourceIndex = sources.indexOf(source);
    if (sourceIndex === -1)
        sourceIndex = resolvedSources.indexOf(source);
    if (sourceIndex === -1)
        return all ? [] : GMapping(null, null);
    const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
    const segments = generated[sourceIndex][line];
    if (segments == null)
        return all ? [] : GMapping(null, null);
    const memo = cast(map)._bySourceMemos[sourceIndex];
    if (all)
        return sliceGeneratedPositions(segments, memo, line, column, bias);
    const index = traceSegmentInternal(segments, memo, line, column, bias);
    if (index === -1)
        return GMapping(null, null);
    const segment = segments[index];
    return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
}

exports.AnyMap = AnyMap;
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
exports.TraceMap = TraceMap;
exports.allGeneratedPositionsFor = allGeneratedPositionsFor;
exports.decodedMap = decodedMap;
exports.decodedMappings = decodedMappings;
exports.eachMapping = eachMapping;
exports.encodedMap = encodedMap;
exports.encodedMappings = encodedMappings;
exports.generatedPositionFor = generatedPositionFor;
exports.isIgnored = isIgnored;
exports.originalPositionFor = originalPositionFor;
exports.presortedDecodedMap = presortedDecodedMap;
exports.sourceContentFor = sourceContentFor;
exports.traceSegment = traceSegment;

}));
//# sourceMappingURL=trace-mapping.umd.js.map
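The exports above form the package's public tracing API. As a quick orientation, here is a minimal usage sketch built only from the functions and conventions documented in this file (1-based `line`, 0-based `column` for `originalPositionFor`); the tiny inline source map is invented purely for illustration:

```ts
import { TraceMap, originalPositionFor, generatedPositionFor } from '@jridgewell/trace-mapping';

// A hand-written single-segment map, made up for this example only:
// 'AAAAA' decodes to [0, 0, 0, 0, 0] -> generated 0:0 maps to input.js 0:0, name "foo".
const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  names: ['foo'],
  mappings: 'AAAAA',
  sourcesContent: ['const foo = 1;'],
});

// Line is 1-based, column is 0-based (legacy `source-map` behavior).
const original = originalPositionFor(tracer, { line: 1, column: 0 });
// -> { source: 'input.js', line: 1, column: 0, name: 'foo' }

const generated = generatedPositionFor(tracer, { source: 'input.js', line: 1, column: 0 });
// -> { line: 1, column: 0 }
```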

frontend/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map (generated, vendored, new file, 1 line; diff suppressed because one or more lines are too long)

frontend/node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts (generated, vendored, new file, 8 lines)

import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
type AnyMap = {
    new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
    (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};

frontend/node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts (generated, vendored, new file, 32 lines)

import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
export type MemoState = {
    lastKey: number;
    lastNeedle: number;
    lastIndex: number;
};
export declare let found: boolean;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function memoizedState(): MemoState;
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;

frontend/node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts (generated, vendored, new file, 7 lines)

import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
export type Source = {
    __proto__: null;
    [line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];

frontend/node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts (generated, vendored, new file, 1 line)

export default function resolve(input: string, base: string | undefined): string;

frontend/node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts (generated, vendored, new file, 2 lines)

import type { SourceMapSegment } from './sourcemap-segment';
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];

frontend/node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts (generated, vendored, new file, 16 lines)

type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
type GeneratedLine = number;
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};
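To make the tuple layout above concrete, here is a small sketch (illustration only; the index constants are restated as plain numbers rather than imported from this internal module, and the segment values are arbitrary):

```ts
// A 5-field segment: [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
const segment: [number, number, number, number, number] = [12, 0, 3, 7, 2];

const generatedColumn = segment[0]; // COLUMN
const sourcesIndex = segment[1];    // SOURCES_INDEX into `sources`
const sourceLine = segment[2];      // SOURCE_LINE (0-based)
const sourceColumn = segment[3];    // SOURCE_COLUMN
const namesIndex = segment[4];      // NAMES_INDEX into `names` (only present on 5-field segments)
```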

frontend/node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts (generated, vendored, new file, 4 lines)

/**
 * Removes everything after the last "/", but leaves the slash.
 */
export default function stripFilename(path: string | undefined | null): string;
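A sketch of the behaviour that comment describes, assuming the obvious `lastIndexOf`-based approach (this is an illustration, not the vendored implementation itself):

```ts
function stripFilename(path: string | undefined | null): string {
  if (!path) return '';
  const index = path.lastIndexOf('/');
  return path.slice(0, index + 1); // keep everything up to and including the last "/"
}

stripFilename('https://example.com/dir/app.min.js.map'); // -> 'https://example.com/dir/'
stripFilename('app.min.js.map');                         // -> '' (no "/" to keep)
```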

frontend/node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts (generated, vendored, new file, 79 lines)

import type { SourceMapSegment } from './sourcemap-segment';
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
export type { SourceMapSegment } from './sourcemap-segment';
export type { SourceMap, DecodedSourceMap, EncodedSourceMap, Section, SectionedSourceMap, SourceMapV3, Bias, EachMapping, GeneratedMapping, InvalidGeneratedMapping, InvalidOriginalMapping, Needle, OriginalMapping, OriginalMapping as Mapping, SectionedSourceMapInput, SourceMapInput, SourceNeedle, XInput, EncodedSourceMapXInput, DecodedSourceMapXInput, SectionedSourceMapXInput, SectionXInput, } from './types';
export declare const LEAST_UPPER_BOUND = -1;
export declare const GREATEST_LOWER_BOUND = 1;
export { AnyMap } from './any-map';
export declare class TraceMap implements SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    ignoreList: SourceMapV3['ignoreList'];
    resolvedSources: string[];
    private _encoded;
    private _decoded;
    private _decodedMemo;
    private _bySources;
    private _bySourceMemos;
    constructor(map: SourceMapInput, mapUrl?: string | null);
}
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
export declare function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'];
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
export declare function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']>;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
export declare function traceSegment(map: TraceMap, line: number, column: number): Readonly<SourceMapSegment> | null;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
export declare function originalPositionFor(map: TraceMap, needle: Needle): OriginalMapping | InvalidOriginalMapping;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
export declare function generatedPositionFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping | InvalidGeneratedMapping;
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
export declare function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[];
/**
 * Iterates each mapping in generated position order.
 */
export declare function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void;
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
 */
export declare function sourceContentFor(map: TraceMap, source: string): string | null;
/**
 * Determines if the source is marked to ignore by the source map.
 */
export declare function isIgnored(map: TraceMap, source: string): boolean;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
export declare function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function decodedMap(map: TraceMap): Omit<DecodedSourceMap, 'mappings'> & {
    mappings: readonly SourceMapSegment[][];
};
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function encodedMap(map: TraceMap): EncodedSourceMap;

frontend/node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts (generated, vendored, new file, 99 lines)

import type { SourceMapSegment } from './sourcemap-segment';
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
export interface SourceMapV3 {
    file?: string | null;
    names: string[];
    sourceRoot?: string;
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
    ignoreList?: number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: SourceMapSegment[][];
}
export interface Section {
    offset: {
        line: number;
        column: number;
    };
    map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
export interface SectionedSourceMap {
    file?: string | null;
    sections: Section[];
    version: 3;
}
export type OriginalMapping = {
    source: string | null;
    line: number;
    column: number;
    name: string | null;
};
export type InvalidOriginalMapping = {
    source: null;
    line: null;
    column: null;
    name: null;
};
export type GeneratedMapping = {
    line: number;
    column: number;
};
export type InvalidGeneratedMapping = {
    line: null;
    column: null;
};
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
export type XInput = {
    x_google_ignoreList?: SourceMapV3['ignoreList'];
};
export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
    sections: SectionXInput[];
};
export type SectionXInput = Omit<Section, 'map'> & {
    map: SectionedSourceMapInput;
};
export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
export type Needle = {
    line: number;
    column: number;
    bias?: Bias;
};
export type SourceNeedle = {
    source: string;
    line: number;
    column: number;
    bias?: Bias;
};
export type EachMapping = {
    generatedLine: number;
    generatedColumn: number;
    source: null;
    originalLine: null;
    originalColumn: null;
    name: null;
} | {
    generatedLine: number;
    generatedColumn: number;
    source: string | null;
    originalLine: number;
    originalColumn: number;
    name: string | null;
};
export declare abstract class SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: SourceMapV3['sources'];
    ignoreList: SourceMapV3['ignoreList'];
}
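The `Needle`/`SourceNeedle` shapes and the `Bias` constants above are part of the package's public typings, so they can be used directly when calling the tracing functions. A small sketch (the concrete line/column values are arbitrary):

```ts
import { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND } from '@jridgewell/trace-mapping';
import type { Needle, SourceNeedle } from '@jridgewell/trace-mapping';

// Generated-position needle for originalPositionFor(). `bias` is optional:
// originalPositionFor() and generatedPositionFor() fall back to GREATEST_LOWER_BOUND,
// while allGeneratedPositionsFor() defaults to LEAST_UPPER_BOUND.
const needle: Needle = { line: 1, column: 13, bias: LEAST_UPPER_BOUND };

// Source-position needle for generatedPositionFor() / allGeneratedPositionsFor().
const sourceNeedle: SourceNeedle = { source: 'input.js', line: 4, column: 2, bias: GREATEST_LOWER_BOUND };
```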

frontend/node_modules/@jridgewell/trace-mapping/package.json (generated, vendored, new file, 77 lines)

{
  "name": "@jridgewell/trace-mapping",
  "version": "0.3.25",
  "description": "Trace the original position through a source map",
  "keywords": [
    "source",
    "map"
  ],
  "main": "dist/trace-mapping.umd.js",
  "module": "dist/trace-mapping.mjs",
  "types": "dist/types/trace-mapping.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/trace-mapping.d.ts",
        "browser": "./dist/trace-mapping.umd.js",
        "require": "./dist/trace-mapping.umd.js",
        "import": "./dist/trace-mapping.mjs"
      },
      "./dist/trace-mapping.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/trace-mapping.git"
  },
  "license": "MIT",
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.mjs",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.mjs",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "11.1.6",
    "@types/mocha": "10.0.6",
    "@types/node": "20.11.20",
    "@typescript-eslint/eslint-plugin": "6.18.1",
    "@typescript-eslint/parser": "6.18.1",
    "benchmark": "2.1.4",
    "c8": "9.0.0",
    "esbuild": "0.19.11",
    "eslint": "8.56.0",
    "eslint-config-prettier": "9.1.0",
    "eslint-plugin-no-only-tests": "3.1.0",
    "mocha": "10.3.0",
    "npm-run-all": "4.1.5",
    "prettier": "3.1.1",
    "rollup": "4.9.4",
    "tsx": "4.7.0",
    "typescript": "5.3.3"
  },
  "dependencies": {
    "@jridgewell/resolve-uri": "^3.1.0",
    "@jridgewell/sourcemap-codec": "^1.4.14"
  }
}

frontend/node_modules/@nodelib/fs.scandir/LICENSE (generated, vendored, new file, 21 lines)

The MIT License (MIT)

Copyright (c) Denis Malinochkin

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

frontend/node_modules/@nodelib/fs.scandir/README.md (generated, vendored, new file, 171 lines)

# @nodelib/fs.scandir

> List files and directories inside the specified directory.

## :bulb: Highlights

The package is aimed at obtaining information about entries in the directory.

* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional).
* :gear: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type. See [`old` and `modern` mode](#old-and-modern-mode).
* :link: Can safely work with broken symbolic links.

## Install

```console
npm install @nodelib/fs.scandir
```

## Usage

```ts
import * as fsScandir from '@nodelib/fs.scandir';

fsScandir.scandir('path', (error, stats) => { /* … */ });
```

## API

### .scandir(path, [optionsOrSettings], callback)

Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path with standard callback-style.

```ts
fsScandir.scandir('path', (error, entries) => { /* … */ });
fsScandir.scandir('path', {}, (error, entries) => { /* … */ });
fsScandir.scandir('path', new fsScandir.Settings(), (error, entries) => { /* … */ });
```

### .scandirSync(path, [optionsOrSettings])

Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path.

```ts
const entries = fsScandir.scandirSync('path');
const entries = fsScandir.scandirSync('path', {});
const entries = fsScandir.scandirSync('path', new fsScandir.Settings());
```

#### path

* Required: `true`
* Type: `string | Buffer | URL`

A path to a file. If a URL is provided, it must use the `file:` protocol.

#### optionsOrSettings

* Required: `false`
* Type: `Options | Settings`
* Default: An instance of `Settings` class

An [`Options`](#options) object or an instance of [`Settings`](#settingsoptions) class.

> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class.

### Settings([options])

A class of full settings of the package.

```ts
const settings = new fsScandir.Settings({ followSymbolicLinks: false });

const entries = fsScandir.scandirSync('path', settings);
```

## Entry

* `name` — The name of the entry (`unknown.txt`).
* `path` — The path of the entry relative to call directory (`root/unknown.txt`).
* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. On Node.js below 10.10 will be emulated by [`DirentFromStats`](./src/utils/fs.ts) class.
* `stats` (optional) — An instance of `fs.Stats` class.

For example, the `scandir` call for `tools` directory with one directory inside:

```ts
{
    dirent: Dirent { name: 'typedoc', /* … */ },
    name: 'typedoc',
    path: 'tools/typedoc'
}
```

## Options

### stats

* Type: `boolean`
* Default: `false`

Adds an instance of `fs.Stats` class to the [`Entry`](#entry).

> :book: Always use `fs.readdir` without the `withFileTypes` option. ??TODO??

### followSymbolicLinks

* Type: `boolean`
* Default: `false`

Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`.

### `throwErrorOnBrokenSymbolicLink`

* Type: `boolean`
* Default: `true`

Throw an error when symbolic link is broken if `true` or safely use `lstat` call if `false`.

### `pathSegmentSeparator`

* Type: `string`
* Default: `path.sep`

By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead.

### `fs`

* Type: [`FileSystemAdapter`](./src/adapters/fs.ts)
* Default: The default `fs` methods

By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own.

```ts
interface FileSystemAdapter {
    lstat?: typeof fs.lstat;
    stat?: typeof fs.stat;
    lstatSync?: typeof fs.lstatSync;
    statSync?: typeof fs.statSync;
    readdir?: typeof fs.readdir;
    readdirSync?: typeof fs.readdirSync;
}

const settings = new fsScandir.Settings({
    fs: { lstat: fakeLstat }
});
```

## `old` and `modern` mode

This package has two modes that are used depending on the environment and parameters of use.

### old

* Node.js below `10.10` or when the `stats` option is enabled

When working in the old mode, the directory is read first (`fs.readdir`), then the type of entries is determined (`fs.lstat` and/or `fs.stat` for symbolic links).

### modern

* Node.js 10.10+ and the `stats` option is disabled

In the modern mode, reading the directory (`fs.readdir` with the `withFileTypes` option) is combined with obtaining information about its entries. An additional call for symbolic links (`fs.stat`) is still present.

This mode makes fewer calls to the file system. It's faster.
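A small sketch of how the two modes are selected in practice, based only on the options documented above (enabling `stats` switches to the old mode; the default on Node.js 10.10+ is the modern mode):

```ts
import * as fsScandir from '@nodelib/fs.scandir';

// Default settings: modern mode on Node.js 10.10+ (fs.readdir with withFileTypes).
const modern = new fsScandir.Settings();

// Requesting stats forces the old mode (fs.readdir followed by fs.lstat / fs.stat).
const withStats = new fsScandir.Settings({ stats: true });

fsScandir.scandir('path', withStats, (error, entries) => {
  if (error) throw error;
  // With `stats: true`, each Entry also carries an fs.Stats instance.
  console.log(entries[0]?.stats?.isDirectory());
});
```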
## Changelog

See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version.

## License

This software is released under the terms of the MIT license.

frontend/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts (generated, vendored, new file, 20 lines)

import type * as fsStat from '@nodelib/fs.stat';
import type { Dirent, ErrnoException } from '../types';
export interface ReaddirAsynchronousMethod {
    (filepath: string, options: {
        withFileTypes: true;
    }, callback: (error: ErrnoException | null, files: Dirent[]) => void): void;
    (filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void;
}
export interface ReaddirSynchronousMethod {
    (filepath: string, options: {
        withFileTypes: true;
    }): Dirent[];
    (filepath: string): string[];
}
export declare type FileSystemAdapter = fsStat.FileSystemAdapter & {
    readdir: ReaddirAsynchronousMethod;
    readdirSync: ReaddirSynchronousMethod;
};
export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter;
export declare function createFileSystemAdapter(fsMethods?: Partial<FileSystemAdapter>): FileSystemAdapter;

frontend/node_modules/@nodelib/fs.scandir/out/adapters/fs.js (generated, vendored, new file, 19 lines)

"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
|
||||
const fs = require("fs");
|
||||
exports.FILE_SYSTEM_ADAPTER = {
|
||||
lstat: fs.lstat,
|
||||
stat: fs.stat,
|
||||
lstatSync: fs.lstatSync,
|
||||
statSync: fs.statSync,
|
||||
readdir: fs.readdir,
|
||||
readdirSync: fs.readdirSync
|
||||
};
|
||||
function createFileSystemAdapter(fsMethods) {
|
||||
if (fsMethods === undefined) {
|
||||
return exports.FILE_SYSTEM_ADAPTER;
|
||||
}
|
||||
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
|
||||
}
|
||||
exports.createFileSystemAdapter = createFileSystemAdapter;
|
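`createFileSystemAdapter` simply merges the caller's overrides over the built-in `fs` defaults (the `Object.assign` call above). A sketch of that behaviour, importing from the internal `out/adapters/fs` module path (not part of the package's documented public API; shown for illustration only):

```ts
import * as fs from 'fs';
import { createFileSystemAdapter, FILE_SYSTEM_ADAPTER } from '@nodelib/fs.scandir/out/adapters/fs';

// Only `lstat` is overridden; every other method falls back to the default adapter.
const adapter = createFileSystemAdapter({ lstat: fs.lstat });

console.log(adapter.lstat === fs.lstat);                              // true
console.log(adapter.readdirSync === FILE_SYSTEM_ADAPTER.readdirSync); // true — unspecified methods keep the defaults
```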

frontend/node_modules/@nodelib/fs.scandir/out/constants.d.ts (generated, vendored, new file, 4 lines)

/**
 * IS `true` for Node.js 10.10 and greater.
 */
export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean;

frontend/node_modules/@nodelib/fs.scandir/out/constants.js (generated, vendored, new file, 17 lines)

"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;
|
||||
const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');
|
||||
if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) {
|
||||
throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`);
|
||||
}
|
||||
const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);
|
||||
const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);
|
||||
const SUPPORTED_MAJOR_VERSION = 10;
|
||||
const SUPPORTED_MINOR_VERSION = 10;
|
||||
const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;
|
||||
const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;
|
||||
/**
|
||||
* IS `true` for Node.js 10.10 and greater.
|
||||
*/
|
||||
exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;
|
Some files were not shown because too many files have changed in this diff.