1041 paginate people modal (#1042)
* initial working
* add test
* fix test
* update paginating format
* merge master
* fix errors
* lint error
* use results instead of result
* more lint errors
This commit is contained in:
parent dfc3599044
commit 5f47e97df2
@@ -2,12 +2,12 @@ import React from 'react'
import { useActions, useValues } from 'kea'
import moment from 'moment'
import { trendsLogic } from 'scenes/trends/trendsLogic'
import { Modal, Button } from 'antd'
import { Modal, Button, Spin } from 'antd'
import { PeopleTable } from 'scenes/users/PeopleTable'

export function PeopleModal({ visible }) {
    const { people, filters } = useValues(trendsLogic({ id: null }))
    const { setShowingPeople } = useActions(trendsLogic({ dashboardItemId: null }))
    const { setShowingPeople, loadMorePeople } = useActions(trendsLogic({ dashboardItemId: null }))

    const title =
        filters.shown_as === 'Stickiness'
@@ -26,13 +26,24 @@ export function PeopleModal({ visible }) {
            {people ? (
                <p>
                    Found {people.count} {people.count === 1 ? 'user' : 'users'}
                    {people.count > 100 ? '. Showing the first 100 below.' : ''}
                </p>
            ) : (
                <p>Loading users...</p>
            )}

            <PeopleTable loading={!people?.people} people={people?.people} />
            <div
                style={{
                    margin: '1rem',
                    textAlign: 'center',
                }}
            >
                {people?.next && (
                    <Button type="primary" onClick={loadMorePeople}>
                        {people?.loadingMore ? <Spin /> : 'Load more people'}
                    </Button>
                )}
            </div>
        </Modal>
    )
}
@@ -81,6 +81,32 @@ function autocorrectInterval({ date_from, interval }) {
    }
}

function parsePeopleParams(peopleParams, filters) {
    const { action, day, breakdown_value } = peopleParams
    const params = filterClientSideParams({
        ...filters,
        entityId: action.id,
        type: action.type,
        breakdown_value,
    })

    if (filters.shown_as === STICKINESS) {
        params.stickiness_days = day
    } else if (params.display === ACTIONS_LINE_GRAPH_CUMULATIVE) {
        params.date_to = day
    } else {
        params.date_from = day
        params.date_to = day
    }
    // If breakdown type is cohort, we use breakdown_value
    // If breakdown type is event, we just set another filter
    if (breakdown_value && filters.breakdown_type != 'cohort') {
        params.properties = [...params.properties, { key: params.breakdown, value: breakdown_value, type: 'event' }]
    }

    return toAPIParams(params)
}

// props:
// - dashboardItemId
// - filters
@@ -121,14 +147,17 @@ export const trendsLogic = kea({
        setDisplay: display => ({ display }),

        loadPeople: (action, label, day, breakdown_value) => ({ action, label, day, breakdown_value }),
        loadMorePeople: true,
        setLoadingMorePeople: status => ({ status }),
        setShowingPeople: isShowing => ({ isShowing }),
        setPeople: (people, count, action, label, day, breakdown_value) => ({
        setPeople: (people, count, action, label, day, breakdown_value, next) => ({
            people,
            count,
            action,
            label,
            day,
            breakdown_value,
            next,
        }),
        setActiveView: type => ({ type }),
        setCachedUrl: (type, url) => ({ type, url }),
@@ -151,6 +180,7 @@ export const trendsLogic = kea({
        {
            [actions.setFilters]: () => null,
            [actions.setPeople]: (_, people) => people,
            [actions.setLoadingMorePeople]: (state, { status }) => ({ ...state, loadingMore: status }),
        },
    ],
    cachedUrls: [
@@ -183,35 +213,35 @@ export const trendsLogic = kea({
            actions.setFilters({ display })
        },
        [actions.loadPeople]: async ({ label, action, day, breakdown_value }, breakpoint) => {
            const params = filterClientSideParams({
                ...values.filters,
                entityId: action.id,
                type: action.type,
                breakdown_value,
            })

            if (values.filters.shown_as === STICKINESS) {
                params.stickiness_days = day
            } else if (params.display === ACTIONS_LINE_GRAPH_CUMULATIVE) {
                params.date_to = day
            } else {
                params.date_from = day
                params.date_to = day
            }
            // If breakdown type is cohort, we use breakdown_value
            // If breakdown type is event, we just set another filter
            if (breakdown_value && values.filters.breakdown_type != 'cohort') {
                params.properties = [
                    ...params.properties,
                    { key: params.breakdown, value: breakdown_value, type: 'event' },
                ]
            }

            const filterParams = toAPIParams(params)
            actions.setPeople(null, null, action, label, day, breakdown_value)
            const filterParams = parsePeopleParams({ label, action, day, breakdown_value }, values.filters)
            actions.setPeople(null, null, action, label, day, breakdown_value, null)
            const people = await api.get(`api/action/people/?include_last_event=1&${filterParams}`)
            breakpoint()
            actions.setPeople(people[0]?.people, people[0]?.count, action, label, day, breakdown_value)
            actions.setPeople(
                people.results[0]?.people,
                people.results[0]?.count,
                action,
                label,
                day,
                breakdown_value,
                people.next
            )
        },
        [actions.loadMorePeople]: async (_, breakpoint) => {
            const { people: currPeople, count, action, label, day, breakdown_value, next } = values.people
            actions.setLoadingMorePeople(true)
            const people = await api.get(next)
            actions.setLoadingMorePeople(false)
            breakpoint()
            actions.setPeople(
                [...currPeople, ...people.results[0]?.people],
                count + people.results[0]?.count,
                action,
                label,
                day,
                breakdown_value,
                people.next
            )
        },
    }),
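On the frontend, `loadMorePeople` above follows the `next` URL from the previous response and concatenates the new page onto the list it already holds. The same cursor-following loop as a minimal standalone Python sketch; the `api_get` callable and the response shape are assumptions read off this diff, not PostHog code:

from typing import Callable, Dict, List, Optional


def load_all_people(api_get: Callable[[str], Dict], first_url: str) -> List[dict]:
    """Follow 'next' links until the server stops returning them."""
    people: List[dict] = []
    url: Optional[str] = first_url
    while url:
        # Each page looks like {"results": [{"people": [...], "count": n}], "next": <url or None>}
        page = api_get(url)
        people.extend(page["results"][0]["people"])
        url = page.get("next")
    return people

The loop terminates when the server returns a null `next`, which the backend change further down arranges once a page comes back short.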
@@ -38,7 +38,7 @@ export function PeopleTable({ people, loading, actions, onChange }) {
            columns={columns}
            loading={loading}
            rowKey={person => person.id}
            pagination={{ pageSize: 100, hideOnSinglePage: true }}
            pagination={{ pageSize: 99999, hideOnSinglePage: true }}
            expandable={{
                expandedRowRender: function RenderPropertiesTable({ properties }) {
                    return <PropertiesTable properties={properties} />
@@ -105,7 +105,9 @@ def get_actions(queryset: QuerySet, params: dict, team_id: int) -> QuerySet:
        queryset = queryset.filter(
            pk__in=[
                action.id
                for action in Filter({"actions": json.loads(params.get("actions", "[]"))}).actions
                for action in Filter(
                    {"actions": json.loads(params.get("actions", "[]"))}
                ).actions
            ]
        )
@@ -131,7 +133,9 @@ class ActionViewSet(viewsets.ModelViewSet):
        queryset = super().get_queryset()
        if self.action == "list":  # type: ignore
            queryset = queryset.filter(deleted=False)
        return get_actions(queryset, self.request.GET.dict(), self.request.user.team_set.get().pk)
        return get_actions(
            queryset, self.request.GET.dict(), self.request.user.team_set.get().pk
        )

    def create(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
        action, created = Action.objects.get_or_create(
@@ -144,7 +148,9 @@ class ActionViewSet(viewsets.ModelViewSet):
            },
        )
        if not created:
            return Response(data={"detail": "action-exists", "id": action.pk}, status=400)
            return Response(
                data={"detail": "action-exists", "id": action.pk}, status=400
            )

        if request.data.get("steps"):
            for step in request.data["steps"]:
@@ -195,7 +201,9 @@ class ActionViewSet(viewsets.ModelViewSet):
        actions = self.get_queryset()
        actions_list: List[Dict[Any, Any]] = ActionSerializer(actions, many=True, context={"request": request}).data  # type: ignore
        if request.GET.get("include_count", False):
            actions_list.sort(key=lambda action: action.get("count", action["id"]), reverse=True)
            actions_list.sort(
                key=lambda action: action.get("count", action["id"]), reverse=True
            )
        return Response({"results": actions_list})

    @action(methods=["GET"], detail=False)
@@ -223,14 +231,17 @@ class ActionViewSet(viewsets.ModelViewSet):
    def people(self, request: request.Request, *args: Any, **kwargs: Any) -> Response:
        team = request.user.team_set.get()
        filter = Filter(request=request)
        offset = int(request.GET.get("offset", 0))

        def _calculate_people(events: QuerySet):
        def _calculate_people(events: QuerySet, offset: int):
            shown_as = request.GET.get("shown_as")
            if shown_as is not None and shown_as == "Stickiness":
                stickiness_days = int(request.GET["stickiness_days"])
                events = (
                    events.values("person_id")
                    .annotate(day_count=Count(functions.TruncDay("timestamp"), distinct=True))
                    .annotate(
                        day_count=Count(functions.TruncDay("timestamp"), distinct=True)
                    )
                    .filter(day_count=stickiness_days)
                )
            else:
@@ -250,7 +261,8 @@ class ActionViewSet(viewsets.ModelViewSet):
            )

            people = Person.objects.filter(
                team=team, id__in=[p["person_id"] for p in events[0:100]]
                team=team,
                id__in=[p["person_id"] for p in events[offset : offset + 100]],
            )

            people = people.prefetch_related(
@@ -270,7 +282,9 @@ class ActionViewSet(viewsets.ModelViewSet):
        if len(filter.entities) >= 1:
            entity = filter.entities[0]
        else:
            entity = Entity({"id": request.GET["entityId"], "type": request.GET["type"]})
            entity = Entity(
                {"id": request.GET["entityId"], "type": request.GET["type"]}
            )

        if entity.type == TREND_FILTER_TYPE_EVENTS:
            filtered_events = process_entity_for_events(
@@ -287,8 +301,28 @@ class ActionViewSet(viewsets.ModelViewSet):
                entity, team_id=team.pk, order_by=None
            ).filter(filter_events(team.pk, filter, entity))

        people = _calculate_people(events=filtered_events)
        return Response([people])
        people = _calculate_people(events=filtered_events, offset=offset)

        current_url = request.get_full_path()
        next_url: Optional[str] = request.get_full_path()
        if people["count"] > 99 and next_url:
            if "offset" in next_url:
                next_url = next_url[1:]
                next_url = next_url.replace(
                    "offset=" + str(offset), "offset=" + str(offset + 100)
                )
            else:
                next_url = request.build_absolute_uri(
                    "{}{}offset={}".format(
                        next_url, "&" if "?" in next_url else "?", offset + 100
                    )
                )
        else:
            next_url = None

        return Response(
            {"results": [people], "next": next_url, "previous": current_url[1:]}
        )
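The scheme above is plain offset pagination: the queryset is sliced `events[offset : offset + 100]`, and when a full page of 100 people comes back the response carries a `next` URL with `offset` advanced by 100. A minimal sketch of that rule using only the Python standard library; the helper name `build_next_url` is illustrative, and only the page size of 100 is taken from the diff:

from typing import Optional
from urllib.parse import parse_qs, urlencode, urlparse, urlunparse

PAGE_SIZE = 100  # the diff pages people 100 at a time


def build_next_url(current_url: str, offset: int, returned_count: int) -> Optional[str]:
    # A short page means there is nothing left to fetch.
    if returned_count < PAGE_SIZE:
        return None
    parts = urlparse(current_url)
    query = parse_qs(parts.query)
    query["offset"] = [str(offset + PAGE_SIZE)]  # add or replace the offset parameter
    return urlunparse(parts._replace(query=urlencode(query, doseq=True)))

For example, `build_next_url("/api/action/people/?entityId=3", 0, 100)` returns `/api/action/people/?entityId=3&offset=100`, while a short page yields `None` so the client stops.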
@@ -301,7 +335,13 @@ def calculate_trends(
    if len(filter.entities) == 0:
        # If no filters, automatically grab all actions and show those instead
        filter.entities = [
            Entity({"id": action.id, "name": action.name, "type": TREND_FILTER_TYPE_ACTIONS,})
            Entity(
                {
                    "id": action.id,
                    "name": action.name,
                    "type": TREND_FILTER_TYPE_ACTIONS,
                }
            )
            for action in actions
        ]
@@ -340,7 +380,9 @@
            )

            compared_trend_entity = convert_to_comparison(
                compared_trend_entity, compared_filter, "{} - {}".format(entity.name, "previous"),
                compared_trend_entity,
                compared_filter,
                "{} - {}".format(entity.name, "previous"),
            )
            entities_list.extend(compared_trend_entity)
        else:
@@ -378,9 +420,13 @@ def build_dataframe(
        ]
    )
    if interval == "week":
        dataframe["date"] = dataframe["date"].apply(lambda x: x - pd.offsets.Week(weekday=6))
        dataframe["date"] = dataframe["date"].apply(
            lambda x: x - pd.offsets.Week(weekday=6)
        )
    elif interval == "month":
        dataframe["date"] = dataframe["date"].apply(lambda x: x - pd.offsets.MonthEnd(n=1))
        dataframe["date"] = dataframe["date"].apply(
            lambda x: x - pd.offsets.MonthEnd(n=1)
        )
    return dataframe
@@ -415,13 +461,15 @@ def group_events_to_date(
        df_dates = pd.DataFrame(filtered.groupby("date").mean(), index=time_index)
        df_dates = df_dates.fillna(0)
        response[value] = {
            key: value[0] if len(value) > 0 else 0 for key, value in df_dates.iterrows()
            key: value[0] if len(value) > 0 else 0
            for key, value in df_dates.iterrows()
        }
    else:
        dataframe = pd.DataFrame([], index=time_index)
        dataframe = dataframe.fillna(0)
        response["total"] = {
            key: value[0] if len(value) > 0 else 0 for key, value in dataframe.iterrows()
            key: value[0] if len(value) > 0 else 0
            for key, value in dataframe.iterrows()
        }
    return response
@@ -444,13 +492,15 @@ def get_interval_annotation(key: str) -> Dict[str, Any]:
def add_cohort_annotations(
    team_id: int, breakdown: List[Union[int, str]]
) -> Dict[str, Union[Value, Exists]]:
    cohorts = Cohort.objects.filter(team_id=team_id, pk__in=[b for b in breakdown if b != "all"])
    cohorts = Cohort.objects.filter(
        team_id=team_id, pk__in=[b for b in breakdown if b != "all"]
    )
    annotations: Dict[str, Union[Value, Exists]] = {}
    for cohort in cohorts:
        annotations["cohort_{}".format(cohort.pk)] = Exists(
            CohortPeople.objects.filter(cohort=cohort.pk, person_id=OuterRef("person_id")).only(
                "id"
            )
            CohortPeople.objects.filter(
                cohort=cohort.pk, person_id=OuterRef("person_id")
            ).only("id")
        )
    if "all" in breakdown:
        annotations["cohort_all"] = Value(True, output_field=BooleanField())
@@ -470,7 +520,9 @@ def aggregate_by_interval(
    values = [interval]
    if breakdown:
        if params.get("breakdown_type") == "cohort":
            annotations = add_cohort_annotations(team_id, json.loads(params.get("breakdown", "[]")))
            annotations = add_cohort_annotations(
                team_id, json.loads(params.get("breakdown", "[]"))
            )
            values.extend(annotations.keys())
            filtered_events = filtered_events.annotate(**annotations)
            breakdown = "cohorts"
@@ -551,11 +603,14 @@ def stickiness(
    }


def breakdown_label(entity: Entity, value: Union[str, int]) -> Dict[str, Optional[Union[str, int]]]:
def breakdown_label(
    entity: Entity, value: Union[str, int]
) -> Dict[str, Optional[Union[str, int]]]:
    ret_dict: Dict[str, Optional[Union[str, int]]] = {}
    if not value or not isinstance(value, str) or "cohort_" not in value:
        ret_dict["label"] = "{} - {}".format(
            entity.name, value if value and value != "None" and value != "nan" else "Other",
            entity.name,
            value if value and value != "None" and value != "nan" else "Other",
        )
        ret_dict["breakdown_value"] = value if value and not pd.isna(value) else None
    else:
@@ -607,14 +662,18 @@ def serialize_entity(
            new_dict = copy.deepcopy(serialized)
            if value != "Total":
                new_dict.update(breakdown_label(entity, value))
            new_dict.update(append_data(dates_filled=list(item.items()), interval=interval))
            new_dict.update(
                append_data(dates_filled=list(item.items()), interval=interval)
            )
            if filter.display == TRENDS_CUMULATIVE:
                new_dict["data"] = np.cumsum(new_dict["data"])
            response.append(new_dict)
    elif params.get("shown_as") == TRENDS_STICKINESS:
        new_dict = copy.deepcopy(serialized)
        new_dict.update(
            stickiness(filtered_events=events, entity=entity, filter=filter, team_id=team_id)
            stickiness(
                filtered_events=events, entity=entity, filter=filter, team_id=team_id
            )
        )
        response.append(new_dict)
@@ -622,7 +681,9 @@ def serialize_entity(


def serialize_people(people: QuerySet, request: request.Request) -> Dict:
    people_dict = [PersonSerializer(person, context={"request": request}).data for person in people]
    people_dict = [
        PersonSerializer(person, context={"request": request}).data for person in people
    ]
    return {"people": people_dict, "count": len(people_dict)}
File diff suppressed because it is too large
@@ -6,18 +6,20 @@ import json
from posthog.celery import update_cache_item
from datetime import datetime


def generate_cache_key(obj):
    stringified = json.dumps(obj)
    return hashlib.md5(stringified.encode("utf-8")).hexdigest()


TRENDS_ENDPOINT = 'Trends'
FUNNEL_ENDPOINT = 'Funnel'
TRENDS_ENDPOINT = "Trends"
FUNNEL_ENDPOINT = "Funnel"


def cached_function(cache_type: str, expiry=30):
    def inner_decorator(f):
        def wrapper(*args, **kw):
            cache_key = ''
            cache_key = ""
            _expiry = expiry

            # prepare caching params
@@ -30,28 +32,34 @@ def cached_function(cache_type: str, expiry=30):

            if cache_type == TRENDS_ENDPOINT:
                request = args[1]
                filter = Filter(request=request)
                filter = Filter(request=request)
                params = request.GET.dict()
                refresh = params.pop('refresh', None)
                refresh = params.pop("refresh", None)
                team = request.user.team_set.get()
                cache_key = generate_cache_key(json.dumps(params) + '_' + str(team.pk))
                payload = {'filter': filter.toJSON(), 'params': params, 'team_id': team.pk}
                cache_key = generate_cache_key(json.dumps(params) + "_" + str(team.pk))
                payload = {
                    "filter": filter.toJSON(),
                    "params": params,
                    "team_id": team.pk,
                }
            elif cache_type == FUNNEL_ENDPOINT:
                request = args[1]
                pk = args[2]
                params = request.GET.dict()
                refresh = params.pop('refresh', None)
                refresh = params.pop("refresh", None)
                team = request.user.team_set.get()
                cache_key = generate_cache_key(str(pk) + '_' + str(team.pk))
                payload = {'pk': pk, 'params': params, 'team_id': team.pk}
                cache_key = generate_cache_key(str(pk) + "_" + str(team.pk))
                payload = {"pk": pk, "params": params, "team_id": team.pk}

            if params and payload and params.get('from_dashboard'): #cache for 30 minutes if dashboard item
                cache_key = cache_key + '_' + 'dashboard'
            if (
                params and payload and params.get("from_dashboard")
            ):  # cache for 30 minutes if dashboard item
                cache_key = cache_key + "_" + "dashboard"
                _expiry = 900
                dashboard_item_id = params.get('from_dashboard')
                payload.update({'dashboard_id': dashboard_item_id})

            cache_key = cache_key + '_' + cache_type
                dashboard_item_id = params.get("from_dashboard")
                payload.update({"dashboard_id": dashboard_item_id})

            cache_key = cache_key + "_" + cache_type

            if refresh and dashboard_item_id:
                dashboard_item = DashboardItem.objects.filter(pk=dashboard_item_id)
@@ -62,16 +70,27 @@ def cached_function(cache_type: str, expiry=30):

            # return result if cached
            cached_result = cache.get(cache_key)
            if cached_result :
                return cached_result['result']
            if cached_result:
                return cached_result["result"]

            # call wrapped function
            result = f(*args, **kw)

            # cache new data using
            if result and payload:
                cache.set(cache_key, {'result':result, 'details': payload, 'type': cache_type, 'last_accessed': datetime.now()}, _expiry)
                cache.set(
                    cache_key,
                    {
                        "result": result,
                        "details": payload,
                        "type": cache_type,
                        "last_accessed": datetime.now(),
                    },
                    _expiry,
                )

            return result

        return wrapper
    return inner_decorator

    return inner_decorator
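Behavior aside from the black reformatting is unchanged here: the decorator hashes the request parameters into a cache key, returns a cached result when present, and otherwise calls through and stores the result with an expiry. A reduced sketch of that shape with an in-memory dict standing in for Django's cache backend; all names are illustrative, not PostHog code:

import hashlib
import json
import time
from typing import Any, Dict

_cache: Dict[str, Dict[str, Any]] = {}  # stand-in for Django's cache backend


def generate_cache_key(obj: Any) -> str:
    # Same idea as the diff: hash a stable JSON dump of the inputs.
    return hashlib.md5(json.dumps(obj, sort_keys=True, default=str).encode("utf-8")).hexdigest()


def cached_function(expiry: int = 30):
    def inner_decorator(f):
        def wrapper(*args, **kw):
            cache_key = generate_cache_key({"fn": f.__name__, "args": args, "kw": kw})
            hit = _cache.get(cache_key)
            if hit and hit["expires_at"] > time.time():
                return hit["result"]  # return the result if it is cached and fresh
            result = f(*args, **kw)  # call the wrapped function
            if result is not None:
                _cache[cache_key] = {"result": result, "expires_at": time.time() + expiry}
            return result

        return wrapper

    return inner_decorator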
@@ -34,9 +34,11 @@ function createEntry(entry) {
        filename: '[name].js',
        chunkFilename: '[name].[contenthash].js',
        publicPath:
            process.env.NODE_ENV === 'production' ? '/static/'
            : process.env.IS_PORTER ? `https://${process.env.PORTER_WEBPACK_HOST}/static/`
            : `http${process.env.LOCAL_HTTPS ? 's' : ''}://${webpackDevServerHost}:8234/static/`
            process.env.NODE_ENV === 'production'
                ? '/static/'
                : process.env.IS_PORTER
                ? `https://${process.env.PORTER_WEBPACK_HOST}/static/`
                : `http${process.env.LOCAL_HTTPS ? 's' : ''}://${webpackDevServerHost}:8234/static/`,
    },
    resolve: {
        alias: {
@@ -147,15 +149,12 @@ function createEntry(entry) {
        hot: true,
        host: webpackDevServerHost,
        port: 8234,
        public: (process.env.IS_PORTER ?
            `https://${process.env.PORTER_WEBPACK_HOST}`
            : `http${process.env.LOCAL_HTTPS ? 's' : ''}://${webpackDevServerHost}:8234`),
        allowedHosts: (process.env.IS_PORTER ?
            [
                `${process.env.PORTER_WEBPACK_HOST}`,
                `${process.env.PORTER_SERVER_HOST}`
            ]
            : []),
        public: process.env.IS_PORTER
            ? `https://${process.env.PORTER_WEBPACK_HOST}`
            : `http${process.env.LOCAL_HTTPS ? 's' : ''}://${webpackDevServerHost}:8234`,
        allowedHosts: process.env.IS_PORTER
            ? [`${process.env.PORTER_WEBPACK_HOST}`, `${process.env.PORTER_SERVER_HOST}`]
            : [],
        headers: {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Headers': '*',