mirror of
https://github.com/netbirdio/netbird.git
synced 2026-04-20 17:26:40 +00:00
Merge branch 'prototype/reverse-proxy-logs-pagination' into prototype/reverse-proxy
This commit is contained in:
@@ -16,14 +16,14 @@ type AccessLogEntry struct {
|
|||||||
ProxyID string `gorm:"index"`
|
ProxyID string `gorm:"index"`
|
||||||
Timestamp time.Time `gorm:"index"`
|
Timestamp time.Time `gorm:"index"`
|
||||||
GeoLocation peer.Location `gorm:"embedded;embeddedPrefix:location_"`
|
GeoLocation peer.Location `gorm:"embedded;embeddedPrefix:location_"`
|
||||||
Method string
|
Method string `gorm:"index"`
|
||||||
Host string
|
Host string `gorm:"index"`
|
||||||
Path string
|
Path string `gorm:"index"`
|
||||||
Duration time.Duration
|
Duration time.Duration `gorm:"index"`
|
||||||
StatusCode int
|
StatusCode int `gorm:"index"`
|
||||||
Reason string
|
Reason string
|
||||||
UserId string
|
UserId string `gorm:"index"`
|
||||||
AuthMethodUsed string
|
AuthMethodUsed string `gorm:"index"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// FromProto creates an AccessLogEntry from a proto.AccessLog
|
// FromProto creates an AccessLogEntry from a proto.AccessLog
|
||||||
|
|||||||
124
management/internals/modules/reverseproxy/accesslogs/filter.go
Normal file
124
management/internals/modules/reverseproxy/accesslogs/filter.go
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
package accesslogs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// DefaultPageSize is the default number of records per page
|
||||||
|
DefaultPageSize = 50
|
||||||
|
// MaxPageSize is the maximum number of records allowed per page
|
||||||
|
MaxPageSize = 100
|
||||||
|
)
|
||||||
|
|
||||||
|
// AccessLogFilter holds pagination and filtering parameters for access logs
|
||||||
|
type AccessLogFilter struct {
|
||||||
|
// Page is the current page number (1-indexed)
|
||||||
|
Page int
|
||||||
|
// PageSize is the number of records per page
|
||||||
|
PageSize int
|
||||||
|
|
||||||
|
// Filtering parameters
|
||||||
|
Search *string // General search across host, path, source IP, and user fields
|
||||||
|
SourceIP *string // Filter by source IP address
|
||||||
|
Host *string // Filter by host header
|
||||||
|
Path *string // Filter by request path (supports LIKE pattern)
|
||||||
|
UserID *string // Filter by authenticated user ID
|
||||||
|
UserEmail *string // Filter by user email (requires user lookup)
|
||||||
|
UserName *string // Filter by user name (requires user lookup)
|
||||||
|
Method *string // Filter by HTTP method
|
||||||
|
Status *string // Filter by status: "success" (2xx/3xx) or "failed" (1xx/4xx/5xx)
|
||||||
|
StatusCode *int // Filter by HTTP status code
|
||||||
|
StartDate *time.Time // Filter by timestamp >= start_date
|
||||||
|
EndDate *time.Time // Filter by timestamp <= end_date
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseFromRequest parses pagination and filter parameters from HTTP request query parameters
|
||||||
|
func (f *AccessLogFilter) ParseFromRequest(r *http.Request) {
|
||||||
|
queryParams := r.URL.Query()
|
||||||
|
|
||||||
|
f.Page = 1
|
||||||
|
if pageStr := queryParams.Get("page"); pageStr != "" {
|
||||||
|
if page, err := strconv.Atoi(pageStr); err == nil && page > 0 {
|
||||||
|
f.Page = page
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
f.PageSize = DefaultPageSize
|
||||||
|
if pageSizeStr := queryParams.Get("page_size"); pageSizeStr != "" {
|
||||||
|
if pageSize, err := strconv.Atoi(pageSizeStr); err == nil && pageSize > 0 {
|
||||||
|
f.PageSize = pageSize
|
||||||
|
if f.PageSize > MaxPageSize {
|
||||||
|
f.PageSize = MaxPageSize
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if search := queryParams.Get("search"); search != "" {
|
||||||
|
f.Search = &search
|
||||||
|
}
|
||||||
|
|
||||||
|
if sourceIP := queryParams.Get("source_ip"); sourceIP != "" {
|
||||||
|
f.SourceIP = &sourceIP
|
||||||
|
}
|
||||||
|
|
||||||
|
if host := queryParams.Get("host"); host != "" {
|
||||||
|
f.Host = &host
|
||||||
|
}
|
||||||
|
|
||||||
|
if path := queryParams.Get("path"); path != "" {
|
||||||
|
f.Path = &path
|
||||||
|
}
|
||||||
|
|
||||||
|
if userID := queryParams.Get("user_id"); userID != "" {
|
||||||
|
f.UserID = &userID
|
||||||
|
}
|
||||||
|
|
||||||
|
if userEmail := queryParams.Get("user_email"); userEmail != "" {
|
||||||
|
f.UserEmail = &userEmail
|
||||||
|
}
|
||||||
|
|
||||||
|
if userName := queryParams.Get("user_name"); userName != "" {
|
||||||
|
f.UserName = &userName
|
||||||
|
}
|
||||||
|
|
||||||
|
if method := queryParams.Get("method"); method != "" {
|
||||||
|
f.Method = &method
|
||||||
|
}
|
||||||
|
|
||||||
|
if status := queryParams.Get("status"); status != "" {
|
||||||
|
f.Status = &status
|
||||||
|
}
|
||||||
|
|
||||||
|
if statusCodeStr := queryParams.Get("status_code"); statusCodeStr != "" {
|
||||||
|
if statusCode, err := strconv.Atoi(statusCodeStr); err == nil && statusCode > 0 {
|
||||||
|
f.StatusCode = &statusCode
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if startDate := queryParams.Get("start_date"); startDate != "" {
|
||||||
|
parsedStartDate, err := time.Parse(time.RFC3339, startDate)
|
||||||
|
if err == nil {
|
||||||
|
f.StartDate = &parsedStartDate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if endDate := queryParams.Get("end_date"); endDate != "" {
|
||||||
|
parsedEndDate, err := time.Parse(time.RFC3339, endDate)
|
||||||
|
if err == nil {
|
||||||
|
f.EndDate = &parsedEndDate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetOffset calculates the database offset for pagination
|
||||||
|
func (f *AccessLogFilter) GetOffset() int {
|
||||||
|
return (f.Page - 1) * f.PageSize
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetLimit returns the page size for database queries
|
||||||
|
func (f *AccessLogFilter) GetLimit() int {
|
||||||
|
return f.PageSize
|
||||||
|
}
|
||||||
@@ -0,0 +1,161 @@
|
|||||||
|
package accesslogs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestAccessLogFilter_ParseFromRequest(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
queryParams map[string]string
|
||||||
|
expectedPage int
|
||||||
|
expectedPageSize int
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "default values when no params provided",
|
||||||
|
queryParams: map[string]string{},
|
||||||
|
expectedPage: 1,
|
||||||
|
expectedPageSize: DefaultPageSize,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid page and page_size",
|
||||||
|
queryParams: map[string]string{
|
||||||
|
"page": "2",
|
||||||
|
"page_size": "25",
|
||||||
|
},
|
||||||
|
expectedPage: 2,
|
||||||
|
expectedPageSize: 25,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "page_size exceeds max, should cap at MaxPageSize",
|
||||||
|
queryParams: map[string]string{
|
||||||
|
"page": "1",
|
||||||
|
"page_size": "200",
|
||||||
|
},
|
||||||
|
expectedPage: 1,
|
||||||
|
expectedPageSize: MaxPageSize,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid page number, should use default",
|
||||||
|
queryParams: map[string]string{
|
||||||
|
"page": "invalid",
|
||||||
|
"page_size": "10",
|
||||||
|
},
|
||||||
|
expectedPage: 1,
|
||||||
|
expectedPageSize: 10,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid page_size, should use default",
|
||||||
|
queryParams: map[string]string{
|
||||||
|
"page": "2",
|
||||||
|
"page_size": "invalid",
|
||||||
|
},
|
||||||
|
expectedPage: 2,
|
||||||
|
expectedPageSize: DefaultPageSize,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "zero page number, should use default",
|
||||||
|
queryParams: map[string]string{
|
||||||
|
"page": "0",
|
||||||
|
"page_size": "10",
|
||||||
|
},
|
||||||
|
expectedPage: 1,
|
||||||
|
expectedPageSize: 10,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "negative page number, should use default",
|
||||||
|
queryParams: map[string]string{
|
||||||
|
"page": "-1",
|
||||||
|
"page_size": "10",
|
||||||
|
},
|
||||||
|
expectedPage: 1,
|
||||||
|
expectedPageSize: 10,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "zero page_size, should use default",
|
||||||
|
queryParams: map[string]string{
|
||||||
|
"page": "1",
|
||||||
|
"page_size": "0",
|
||||||
|
},
|
||||||
|
expectedPage: 1,
|
||||||
|
expectedPageSize: DefaultPageSize,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/test", nil)
|
||||||
|
q := req.URL.Query()
|
||||||
|
for key, value := range tt.queryParams {
|
||||||
|
q.Set(key, value)
|
||||||
|
}
|
||||||
|
req.URL.RawQuery = q.Encode()
|
||||||
|
|
||||||
|
filter := &AccessLogFilter{}
|
||||||
|
filter.ParseFromRequest(req)
|
||||||
|
|
||||||
|
assert.Equal(t, tt.expectedPage, filter.Page, "Page mismatch")
|
||||||
|
assert.Equal(t, tt.expectedPageSize, filter.PageSize, "PageSize mismatch")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccessLogFilter_GetOffset(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
page int
|
||||||
|
pageSize int
|
||||||
|
expectedOffset int
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "first page",
|
||||||
|
page: 1,
|
||||||
|
pageSize: 50,
|
||||||
|
expectedOffset: 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "second page",
|
||||||
|
page: 2,
|
||||||
|
pageSize: 50,
|
||||||
|
expectedOffset: 50,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "third page with page size 25",
|
||||||
|
page: 3,
|
||||||
|
pageSize: 25,
|
||||||
|
expectedOffset: 50,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "page 10 with page size 10",
|
||||||
|
page: 10,
|
||||||
|
pageSize: 10,
|
||||||
|
expectedOffset: 90,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
filter := &AccessLogFilter{
|
||||||
|
Page: tt.page,
|
||||||
|
PageSize: tt.pageSize,
|
||||||
|
}
|
||||||
|
|
||||||
|
offset := filter.GetOffset()
|
||||||
|
assert.Equal(t, tt.expectedOffset, offset)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccessLogFilter_GetLimit(t *testing.T) {
|
||||||
|
filter := &AccessLogFilter{
|
||||||
|
Page: 2,
|
||||||
|
PageSize: 25,
|
||||||
|
}
|
||||||
|
|
||||||
|
limit := filter.GetLimit()
|
||||||
|
assert.Equal(t, 25, limit, "GetLimit should return PageSize")
|
||||||
|
}
|
||||||
@@ -6,5 +6,5 @@ import (
|
|||||||
|
|
||||||
// Manager provides persistence and paginated retrieval of reverse-proxy
// access logs.
type Manager interface {
	// SaveAccessLog persists a single access-log entry.
	SaveAccessLog(ctx context.Context, proxyLog *AccessLogEntry) error
	// GetAllAccessLogs returns one page of the account's access logs matching
	// filter, plus the total number of matching records (for pagination).
	GetAllAccessLogs(ctx context.Context, accountID, userID string, filter *AccessLogFilter) ([]*AccessLogEntry, int64, error)
}
|
||||||
|
|||||||
@@ -30,7 +30,10 @@ func (h *handler) getAccessLogs(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
logs, err := h.manager.GetAllAccessLogs(r.Context(), userAuth.AccountId, userAuth.UserId)
|
var filter accesslogs.AccessLogFilter
|
||||||
|
filter.ParseFromRequest(r)
|
||||||
|
|
||||||
|
logs, totalCount, err := h.manager.GetAllAccessLogs(r.Context(), userAuth.AccountId, userAuth.UserId, &filter)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
util.WriteError(r.Context(), err, w)
|
util.WriteError(r.Context(), err, w)
|
||||||
return
|
return
|
||||||
@@ -41,5 +44,21 @@ func (h *handler) getAccessLogs(w http.ResponseWriter, r *http.Request) {
|
|||||||
apiLogs = append(apiLogs, *log.ToAPIResponse())
|
apiLogs = append(apiLogs, *log.ToAPIResponse())
|
||||||
}
|
}
|
||||||
|
|
||||||
util.WriteJSONObject(r.Context(), w, apiLogs)
|
response := &api.ProxyAccessLogsResponse{
|
||||||
|
Data: apiLogs,
|
||||||
|
Page: filter.Page,
|
||||||
|
PageSize: filter.PageSize,
|
||||||
|
TotalRecords: int(totalCount),
|
||||||
|
TotalPages: getTotalPageCount(int(totalCount), filter.PageSize),
|
||||||
|
}
|
||||||
|
|
||||||
|
util.WriteJSONObject(r.Context(), w, response)
|
||||||
|
}
|
||||||
|
|
||||||
|
// getTotalPageCount calculates the total number of pages needed to hold
// totalCount records at pageSize records per page, rounding up. A
// non-positive pageSize yields zero, avoiding division by zero.
func getTotalPageCount(totalCount, pageSize int) int {
	if pageSize > 0 {
		// Integer ceiling division.
		return (totalCount + pageSize - 1) / pageSize
	}
	return 0
}
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package manager
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"strings"
|
||||||
|
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
|
|
||||||
@@ -55,20 +56,53 @@ func (m *managerImpl) SaveAccessLog(ctx context.Context, logEntry *accesslogs.Ac
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetAllAccessLogs retrieves all access logs for an account
|
// GetAllAccessLogs retrieves access logs for an account with pagination and filtering
|
||||||
func (m *managerImpl) GetAllAccessLogs(ctx context.Context, accountID, userID string) ([]*accesslogs.AccessLogEntry, error) {
|
func (m *managerImpl) GetAllAccessLogs(ctx context.Context, accountID, userID string, filter *accesslogs.AccessLogFilter) ([]*accesslogs.AccessLogEntry, int64, error) {
|
||||||
ok, err := m.permissionsManager.ValidateUserPermissions(ctx, accountID, userID, modules.Services, operations.Read)
|
ok, err := m.permissionsManager.ValidateUserPermissions(ctx, accountID, userID, modules.Services, operations.Read)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, status.NewPermissionValidationError(err)
|
return nil, 0, status.NewPermissionValidationError(err)
|
||||||
}
|
}
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil, status.NewPermissionDeniedError()
|
return nil, 0, status.NewPermissionDeniedError()
|
||||||
}
|
}
|
||||||
|
|
||||||
logs, err := m.store.GetAccountAccessLogs(ctx, store.LockingStrengthNone, accountID)
|
if err := m.resolveUserFilters(ctx, accountID, filter); err != nil {
|
||||||
|
log.WithContext(ctx).Warnf("failed to resolve user filters: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
logs, totalCount, err := m.store.GetAccountAccessLogs(ctx, store.LockingStrengthNone, accountID, *filter)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, 0, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return logs, nil
|
return logs, totalCount, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveUserFilters converts user email/name filters to user ID filter
|
||||||
|
func (m *managerImpl) resolveUserFilters(ctx context.Context, accountID string, filter *accesslogs.AccessLogFilter) error {
|
||||||
|
if filter.UserEmail == nil && filter.UserName == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
users, err := m.store.GetAccountUsers(ctx, store.LockingStrengthNone, accountID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var matchingUserIDs []string
|
||||||
|
for _, user := range users {
|
||||||
|
if filter.UserEmail != nil && strings.Contains(strings.ToLower(user.Email), strings.ToLower(*filter.UserEmail)) {
|
||||||
|
matchingUserIDs = append(matchingUserIDs, user.Id)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if filter.UserName != nil && strings.Contains(strings.ToLower(user.Name), strings.ToLower(*filter.UserName)) {
|
||||||
|
matchingUserIDs = append(matchingUserIDs, user.Id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(matchingUserIDs) > 0 {
|
||||||
|
filter.UserID = &matchingUserIDs[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5061,14 +5061,31 @@ func (s *SqlStore) CreateAccessLog(ctx context.Context, logEntry *accesslogs.Acc
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetAccountAccessLogs retrieves all access logs for a given account
|
// GetAccountAccessLogs retrieves access logs for a given account with pagination and filtering
|
||||||
func (s *SqlStore) GetAccountAccessLogs(ctx context.Context, lockStrength LockingStrength, accountID string) ([]*accesslogs.AccessLogEntry, error) {
|
func (s *SqlStore) GetAccountAccessLogs(ctx context.Context, lockStrength LockingStrength, accountID string, filter accesslogs.AccessLogFilter) ([]*accesslogs.AccessLogEntry, int64, error) {
|
||||||
var logs []*accesslogs.AccessLogEntry
|
var logs []*accesslogs.AccessLogEntry
|
||||||
|
var totalCount int64
|
||||||
|
|
||||||
|
baseQuery := s.db.WithContext(ctx).
|
||||||
|
Model(&accesslogs.AccessLogEntry{}).
|
||||||
|
Where(accountIDCondition, accountID)
|
||||||
|
|
||||||
|
baseQuery = s.applyAccessLogFilters(baseQuery, filter)
|
||||||
|
|
||||||
|
if err := baseQuery.Count(&totalCount).Error; err != nil {
|
||||||
|
log.WithContext(ctx).Errorf("failed to count access logs: %v", err)
|
||||||
|
return nil, 0, status.Errorf(status.Internal, "failed to count access logs")
|
||||||
|
}
|
||||||
|
|
||||||
query := s.db.WithContext(ctx).
|
query := s.db.WithContext(ctx).
|
||||||
Where(accountIDCondition, accountID).
|
Where(accountIDCondition, accountID)
|
||||||
|
|
||||||
|
query = s.applyAccessLogFilters(query, filter)
|
||||||
|
|
||||||
|
query = query.
|
||||||
Order("timestamp DESC").
|
Order("timestamp DESC").
|
||||||
Limit(1000)
|
Limit(filter.GetLimit()).
|
||||||
|
Offset(filter.GetOffset())
|
||||||
|
|
||||||
if lockStrength != LockingStrengthNone {
|
if lockStrength != LockingStrengthNone {
|
||||||
query = query.Clauses(clause.Locking{Strength: string(lockStrength)})
|
query = query.Clauses(clause.Locking{Strength: string(lockStrength)})
|
||||||
@@ -5077,10 +5094,64 @@ func (s *SqlStore) GetAccountAccessLogs(ctx context.Context, lockStrength Lockin
|
|||||||
result := query.Find(&logs)
|
result := query.Find(&logs)
|
||||||
if result.Error != nil {
|
if result.Error != nil {
|
||||||
log.WithContext(ctx).Errorf("failed to get access logs from store: %v", result.Error)
|
log.WithContext(ctx).Errorf("failed to get access logs from store: %v", result.Error)
|
||||||
return nil, status.Errorf(status.Internal, "failed to get access logs from store")
|
return nil, 0, status.Errorf(status.Internal, "failed to get access logs from store")
|
||||||
}
|
}
|
||||||
|
|
||||||
return logs, nil
|
return logs, totalCount, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// applyAccessLogFilters applies filter conditions to the query
|
||||||
|
func (s *SqlStore) applyAccessLogFilters(query *gorm.DB, filter accesslogs.AccessLogFilter) *gorm.DB {
|
||||||
|
if filter.Search != nil {
|
||||||
|
searchPattern := "%" + *filter.Search + "%"
|
||||||
|
query = query.Where(
|
||||||
|
"location_connection_ip LIKE ? OR host LIKE ? OR path LIKE ? OR CONCAT(host, path) LIKE ? OR user_id IN (SELECT id FROM users WHERE email LIKE ? OR name LIKE ?)",
|
||||||
|
searchPattern, searchPattern, searchPattern, searchPattern, searchPattern, searchPattern,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if filter.SourceIP != nil {
|
||||||
|
query = query.Where("location_connection_ip = ?", *filter.SourceIP)
|
||||||
|
}
|
||||||
|
|
||||||
|
if filter.Host != nil {
|
||||||
|
query = query.Where("host = ?", *filter.Host)
|
||||||
|
}
|
||||||
|
|
||||||
|
if filter.Path != nil {
|
||||||
|
// Support LIKE pattern for path filtering
|
||||||
|
query = query.Where("path LIKE ?", "%"+*filter.Path+"%")
|
||||||
|
}
|
||||||
|
|
||||||
|
if filter.UserID != nil {
|
||||||
|
query = query.Where("user_id = ?", *filter.UserID)
|
||||||
|
}
|
||||||
|
|
||||||
|
if filter.Method != nil {
|
||||||
|
query = query.Where("method = ?", *filter.Method)
|
||||||
|
}
|
||||||
|
|
||||||
|
if filter.Status != nil {
|
||||||
|
if *filter.Status == "success" {
|
||||||
|
query = query.Where("status_code >= ? AND status_code < ?", 200, 400)
|
||||||
|
} else if *filter.Status == "failed" {
|
||||||
|
query = query.Where("status_code < ? OR status_code >= ?", 200, 400)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if filter.StatusCode != nil {
|
||||||
|
query = query.Where("status_code = ?", *filter.StatusCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
if filter.StartDate != nil {
|
||||||
|
query = query.Where("timestamp >= ?", *filter.StartDate)
|
||||||
|
}
|
||||||
|
|
||||||
|
if filter.EndDate != nil {
|
||||||
|
query = query.Where("timestamp <= ?", *filter.EndDate)
|
||||||
|
}
|
||||||
|
|
||||||
|
return query
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SqlStore) GetReverseProxyTargetByTargetID(ctx context.Context, lockStrength LockingStrength, accountID string, targetID string) (*reverseproxy.Target, error) {
|
func (s *SqlStore) GetReverseProxyTargetByTargetID(ctx context.Context, lockStrength LockingStrength, accountID string, targetID string) (*reverseproxy.Target, error) {
|
||||||
|
|||||||
@@ -266,7 +266,7 @@ type Store interface {
|
|||||||
DeleteCustomDomain(ctx context.Context, accountID string, domainID string) error
|
DeleteCustomDomain(ctx context.Context, accountID string, domainID string) error
|
||||||
|
|
||||||
CreateAccessLog(ctx context.Context, log *accesslogs.AccessLogEntry) error
|
CreateAccessLog(ctx context.Context, log *accesslogs.AccessLogEntry) error
|
||||||
GetAccountAccessLogs(ctx context.Context, lockStrength LockingStrength, accountID string) ([]*accesslogs.AccessLogEntry, error)
|
GetAccountAccessLogs(ctx context.Context, lockStrength LockingStrength, accountID string, filter accesslogs.AccessLogFilter) ([]*accesslogs.AccessLogEntry, int64, error)
|
||||||
GetReverseProxyTargetByTargetID(ctx context.Context, lockStrength LockingStrength, accountID string, targetID string) (*reverseproxy.Target, error)
|
GetReverseProxyTargetByTargetID(ctx context.Context, lockStrength LockingStrength, accountID string, targetID string) (*reverseproxy.Target, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -2773,6 +2773,36 @@ components:
|
|||||||
- path
|
- path
|
||||||
- duration_ms
|
- duration_ms
|
||||||
- status_code
|
- status_code
|
||||||
|
ProxyAccessLogsResponse:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
data:
|
||||||
|
type: array
|
||||||
|
description: List of proxy access log entries
|
||||||
|
items:
|
||||||
|
$ref: "#/components/schemas/ProxyAccessLog"
|
||||||
|
page:
|
||||||
|
type: integer
|
||||||
|
description: Current page number
|
||||||
|
example: 1
|
||||||
|
page_size:
|
||||||
|
type: integer
|
||||||
|
description: Number of items per page
|
||||||
|
example: 50
|
||||||
|
total_records:
|
||||||
|
type: integer
|
||||||
|
description: Total number of log records available
|
||||||
|
example: 523
|
||||||
|
total_pages:
|
||||||
|
type: integer
|
||||||
|
description: Total number of pages available
|
||||||
|
example: 11
|
||||||
|
required:
|
||||||
|
- data
|
||||||
|
- page
|
||||||
|
- page_size
|
||||||
|
- total_records
|
||||||
|
- total_pages
|
||||||
IdentityProviderType:
|
IdentityProviderType:
|
||||||
type: string
|
type: string
|
||||||
description: Type of identity provider
|
description: Type of identity provider
|
||||||
@@ -6341,17 +6371,97 @@ paths:
|
|||||||
/api/events/proxy:
|
/api/events/proxy:
|
||||||
get:
|
get:
|
||||||
summary: List all Reverse Proxy Access Logs
|
summary: List all Reverse Proxy Access Logs
|
||||||
description: Returns a list of all reverse proxy access log entries
|
description: Returns a paginated list of all reverse proxy access log entries
|
||||||
tags: [ Events ]
|
tags: [ Events ]
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: page
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
default: 1
|
||||||
|
minimum: 1
|
||||||
|
description: Page number for pagination (1-indexed)
|
||||||
|
- in: query
|
||||||
|
name: page_size
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
default: 50
|
||||||
|
minimum: 1
|
||||||
|
maximum: 100
|
||||||
|
description: Number of items per page (max 100)
|
||||||
|
- in: query
|
||||||
|
name: search
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
description: General search across host, path, source IP, user email, and user name
|
||||||
|
- in: query
|
||||||
|
name: source_ip
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
description: Filter by source IP address
|
||||||
|
- in: query
|
||||||
|
name: host
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
description: Filter by host header
|
||||||
|
- in: query
|
||||||
|
name: path
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
description: Filter by request path (supports partial matching)
|
||||||
|
- in: query
|
||||||
|
name: user_id
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
description: Filter by authenticated user ID
|
||||||
|
- in: query
|
||||||
|
name: user_email
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
description: Filter by user email (partial matching)
|
||||||
|
- in: query
|
||||||
|
name: user_name
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
description: Filter by user name (partial matching)
|
||||||
|
- in: query
|
||||||
|
name: method
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
enum: [GET, POST, PUT, PATCH, DELETE, HEAD, OPTIONS]
|
||||||
|
description: Filter by HTTP method
|
||||||
|
- in: query
|
||||||
|
name: status
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
enum: [success, failed]
|
||||||
|
description: Filter by status (success = 2xx/3xx, failed = 1xx/4xx/5xx)
|
||||||
|
- in: query
|
||||||
|
name: status_code
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 100
|
||||||
|
maximum: 599
|
||||||
|
description: Filter by HTTP status code
|
||||||
|
- in: query
|
||||||
|
name: start_date
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
description: Filter by timestamp >= start_date (RFC3339 format)
|
||||||
|
- in: query
|
||||||
|
name: end_date
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
description: Filter by timestamp <= end_date (RFC3339 format)
|
||||||
responses:
|
responses:
|
||||||
"200":
|
"200":
|
||||||
description: List of reverse proxy access logs
|
description: Paginated list of reverse proxy access logs
|
||||||
content:
|
content:
|
||||||
application/json:
|
application/json:
|
||||||
schema:
|
schema:
|
||||||
type: array
|
$ref: "#/components/schemas/ProxyAccessLogsResponse"
|
||||||
items:
|
|
||||||
$ref: "#/components/schemas/ProxyAccessLog"
|
|
||||||
'401':
|
'401':
|
||||||
"$ref": "#/components/responses/requires_authentication"
|
"$ref": "#/components/responses/requires_authentication"
|
||||||
'403':
|
'403':
|
||||||
|
|||||||
@@ -1950,6 +1950,24 @@ type ProxyAccessLog struct {
|
|||||||
UserId *string `json:"user_id,omitempty"`
|
UserId *string `json:"user_id,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ProxyAccessLogsResponse defines model for ProxyAccessLogsResponse.
// Paginated envelope for proxy access logs; presumably generated from the
// OpenAPI spec — edit the spec and regenerate rather than hand-modifying.
type ProxyAccessLogsResponse struct {
	// Data List of proxy access log entries
	Data []ProxyAccessLog `json:"data"`

	// Page Current page number
	Page int `json:"page"`

	// PageSize Number of items per page
	PageSize int `json:"page_size"`

	// TotalPages Total number of pages available
	TotalPages int `json:"total_pages"`

	// TotalRecords Total number of log records available
	TotalRecords int `json:"total_records"`
}
||||||
|
|
||||||
// ProxyCluster A proxy cluster represents a group of proxy nodes serving the same address
|
// ProxyCluster A proxy cluster represents a group of proxy nodes serving the same address
|
||||||
type ProxyCluster struct {
|
type ProxyCluster struct {
|
||||||
// Address Cluster address used for CNAME targets
|
// Address Cluster address used for CNAME targets
|
||||||
@@ -2655,6 +2673,15 @@ type GetApiEventsNetworkTrafficParamsConnectionType string
|
|||||||
// GetApiEventsNetworkTrafficParamsDirection defines parameters for GetApiEventsNetworkTraffic.
|
// GetApiEventsNetworkTrafficParamsDirection defines parameters for GetApiEventsNetworkTraffic.
|
||||||
type GetApiEventsNetworkTrafficParamsDirection string
|
type GetApiEventsNetworkTrafficParamsDirection string
|
||||||
|
|
||||||
|
// GetApiEventsProxyParams defines parameters for GetApiEventsProxy.
//
// NOTE(review): the OpenAPI spec for /api/events/proxy also declares search,
// source_ip, host, path, user_id, user_email, user_name, method, status,
// status_code, start_date and end_date query parameters, but this generated
// type only carries page/page_size — regenerate the API types from the
// updated spec so typed handlers can see the filter parameters.
type GetApiEventsProxyParams struct {
	// Page Page number for pagination (1-indexed)
	Page *int `form:"page,omitempty" json:"page,omitempty"`

	// PageSize Number of items per page (max 100)
	PageSize *int `form:"page_size,omitempty" json:"page_size,omitempty"`
}
|
||||||
|
|
||||||
// GetApiGroupsParams defines parameters for GetApiGroups.
|
// GetApiGroupsParams defines parameters for GetApiGroups.
|
||||||
type GetApiGroupsParams struct {
|
type GetApiGroupsParams struct {
|
||||||
// Name Filter groups by name (exact match)
|
// Name Filter groups by name (exact match)
|
||||||
|
|||||||
Reference in New Issue
Block a user