Merge branch 'stable'
gdixon committed Nov 1, 2021
2 parents 55e6c22 + b8b8552 commit d2e56c3
Showing 14 changed files with 129 additions and 52 deletions.
6 changes: 3 additions & 3 deletions app/app/context.py
@@ -75,7 +75,8 @@ def preprocess(request):

user_is_authenticated = request.user.is_authenticated if hasattr(request, 'user') else None
profile = request.user.profile if user_is_authenticated and hasattr(request.user, 'profile') else None
if user_is_authenticated and profile and profile.pk:
user_is_authenticated_and_valid = user_is_authenticated and profile and profile.pk
if user_is_authenticated_and_valid:
# what actions to take?
should_record_join = not profile.last_visit
should_record_visit = not profile.last_visit or profile.last_visit < (
@@ -114,8 +115,7 @@ def preprocess(request):

# town square wall post max length
max_length_offset = abs(((
request.user.profile.created_on
if hasattr(request.user, 'profile') and request.user.is_authenticated else timezone.now()
request.user.profile.created_on if user_is_authenticated_and_valid else timezone.now()
) - timezone.now()).days)
max_length = 600 + max_length_offset

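Note: the commit hoists the auth/profile check into user_is_authenticated_and_valid and reuses it for the town-square post cap in the second hunk. A stand-alone restatement of that cap logic, for reference only (profile_created_on stands in for request.user.profile.created_on; this is not the literal context.py code):

    from django.utils import timezone

    def wall_post_max_length(user_is_authenticated_and_valid, profile_created_on):
        # Anonymous or invalid users fall back to timezone.now(), so the offset is 0
        # and the cap stays at the base 600; older profiles get a larger allowance.
        reference = profile_created_on if user_is_authenticated_and_valid else timezone.now()
        max_length_offset = abs((reference - timezone.now()).days)
        return 600 + max_length_offset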
6 changes: 3 additions & 3 deletions app/app/sitemaps.py
@@ -196,12 +196,12 @@ def location(self, item):
'grants': GrantsSitemap,
'hackathons': HackathonEventSiteMap,
'projects': HackathonProjectSiteMap,
'profiles': ProfileSitemap,
'posts': PostSitemap,
# 'profiles': ProfileSitemap,
# 'posts': PostSitemap,
'quests': QuestsSitemap,
'issues': IssueSitemap,
'kudos': KudosSitemap,
'activity': ActivitySitemap,
# 'activity': ActivitySitemap,
'landers': ContributorLandingPageSitemap,
'results': ResultsSitemap,
'static': StaticViewSitemap,
5 changes: 4 additions & 1 deletion app/assets/v2/js/pages/results.js
@@ -148,6 +148,9 @@ $(document).ready(function() {
});
}
});
setTimeout(function() {
$('#grants_results').html(' <iframe src="https://metabase.gitcoin.co/public/question/437a0ed4-bcb7-41eb-af63-e67c0965d200" frameborder="0" width="100%" height="600" allowtransparency></iframe>');
}, 2000);
setTimeout(function() {
$('#leaderboard_nav .nav-link:first-child').click();

@@ -160,7 +163,7 @@ $(document).ready(function() {
</div>
<div class="row py-1">
<div class="col-12 offset-md-0 d-flex justify-content-center align-items-center ">
<blockquote class="twitter-tweet"><p lang="en" dir="ltr">I&#39;m genuinely amazed by the projects in the <a href="https://twitter.com/ArweaveTeam?ref_src=twsrc%5Etfw">@ArweaveTeam</a> <a href="https://twitter.com/gitcoin?ref_src=twsrc%5Etfw">@gitcoin</a> incubator, these are incredible. Wow.</p>&mdash; Lasse Clausen (@lalleclausen) <a href="https://twitter.com/lalleclausen/status/1294320204470722560?ref_src=twsrc%5Etfw">August 14, 2020</a></blockquote>
<blockquote class="twitter-tweet"><p lang="en" dir="ltr">I&#39;m genuinely amazed by the projects in the <a href="https://twitter.com/ArweaveTeam?ref_src=twsrc%5Etfw">@ArweaveTeam</a> <a href="https://twitter.com/gitcoin?ref_src=twsrc%5Etfw">@gitcoin</a> incubator, these are incredible. Wow.</p>&mdash; Lasse Clausen (@lalleclausen) <a href="https://twitter.com/lalleclausen/status/1294320204470722560?ref_src=twsrc%5Etfw">August 14, 2020</a></blockquote>
<script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
</div>
</div>
34 changes: 34 additions & 0 deletions app/dashboard/migrations/0194_auto_20211029_1748.py
@@ -0,0 +1,34 @@
# Generated by Django 2.2.24 on 2021-10-29 17:48

from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('dashboard', '0193_tip_value_in_usdt'),
]

operations = [
migrations.AlterModelOptions(
name='searchhistory',
options={'verbose_name_plural': 'Bounties'},
),
migrations.AlterField(
model_name='searchhistory',
name='data',
field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default=dict),
),
migrations.AlterField(
model_name='searchhistory',
name='ip_address',
field=models.GenericIPAddressField(blank=True, db_index=True, null=True),
),
migrations.AlterIndexTogether(
name='searchhistory',
index_together={('data', 'search_type', 'ip_address'), ('data', 'search_type', 'ip_address', 'user')},
),
]
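Note: this migration adds db_index to SearchHistory.data and SearchHistory.ip_address and creates the two composite indexes. A quick way to inspect and apply it, assuming the project's usual Django setup (call_command, sqlmigrate and migrate are stock Django; the migration name comes from the file above):

    from django.core.management import call_command

    # Print the SQL (CREATE INDEX statements) this migration will run, then apply it.
    call_command('sqlmigrate', 'dashboard', '0194_auto_20211029_1748')
    call_command('migrate', 'dashboard', '0194_auto_20211029_1748')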
13 changes: 11 additions & 2 deletions app/dashboard/models.py
@@ -4963,8 +4963,17 @@ class Meta:

search_type = models.CharField(max_length=50, db_index=True)
user = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True)
data = JSONField(default=dict)
ip_address = models.GenericIPAddressField(blank=True, null=True)
data = JSONField(default=dict, db_index=True)
ip_address = models.GenericIPAddressField(blank=True, null=True, db_index=True)

class Meta:
"""Define metadata associated with Bounty."""

verbose_name_plural = 'Bounties'
index_together = [
["data", "search_type", "ip_address"],
["data", "search_type", "ip_address", "user"],
]


class BlockedUser(SuperModel):
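Note: the new index_together pairs cover equality lookups on (data, search_type, ip_address), with and without user. A hypothetical query of that shape, purely illustrative (the filter values and call site are not part of this commit):

    from dashboard.models import SearchHistory

    matches = SearchHistory.objects.filter(
        data={'query': 'ethereum'},     # exact match on the JSONField, now backed by db_index
        search_type='bounty',
        ip_address='203.0.113.7',
    ).order_by('-pk')[:10]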
31 changes: 30 additions & 1 deletion app/dashboard/tasks.py
@@ -14,7 +14,9 @@
from app.utils import get_location_from_ip
from celery import app, group
from celery.utils.log import get_task_logger
from dashboard.models import Activity, Bounty, ObjectView, Profile, UserAction
from dashboard.models import Activity, Bounty, Earning, ObjectView, Profile, TransactionHistory, UserAction
from dashboard.utils import get_tx_status_and_details
from economy.models import EncodeAnything
from marketing.mails import func_name, grant_update_email, send_mail
from proxy.views import proxy_view
from retail.emails import render_share_bounty
@@ -409,3 +411,30 @@ def record_join(self, profile_pk, retry: bool = True) -> None:
Activity.objects.create(profile=profile, activity_type='joined')
except Exception as e:
logger.exception(e)


@app.shared_task(bind=True, max_retries=3)
def save_tx_status_and_details(self, earning_pk, chain='std'):
"""
:param self: Self
:param earning_pk: primary key of the Earning; its txid, network and created_on (used to detect a dropped tx) are read from the record
:param chain: chain to pass to web3
:return: None
"""
earning = Earning.objects.filter(pk=earning_pk).first()
txid = earning.txid
network = earning.network
created_on = earning.created_on
status, timestamp, tx = get_tx_status_and_details(txid, network, created_on, chain)
if status not in ['unknown', 'pending']:
tx = tx if tx else {}
TransactionHistory.objects.create(
earning=earning,
status=status,
payload=json.loads(json.dumps(dict(tx), cls=EncodeAnything)),
network=network,
txid=txid,
captured_at=timezone.now(),
)
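Note: a usage sketch for the new task, mirroring the kudos management-command change later in this commit. It assumes a running celery worker and broker for .delay(); the direct call is only for debugging, and the Earning chosen here is illustrative:

    from dashboard.models import Earning
    from dashboard.tasks import save_tx_status_and_details

    earning = Earning.objects.filter(history=None).order_by('-pk').first()
    if earning:
        save_tx_status_and_details.delay(earning.pk)            # async, default chain='std'
        # save_tx_status_and_details(earning.pk, chain='std')   # synchronous debug run

Transactions that come back as 'unknown' or 'pending' get no TransactionHistory row, so a later dispatch over Earnings without history simply tries them again.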
4 changes: 4 additions & 0 deletions app/grants/management/commands/payout_round_noncustodial.py
@@ -296,6 +296,10 @@ def chunks(lst, n):
# Convert to human units
total_dai_required = total_dai_required_wei / SCALE

# TODO: REMOVE THIS AFTER ROUND 11 PAYOUT
# THIS IS DUE TO SECOND CONTRACT DEPLOY FOR PAYOUT
expected_total_dai_amount = total_dai_required

# Verify that total DAI required (from event logs) equals the expected amount
if math.floor(expected_total_dai_amount) != math.floor(total_dai_required):
print('\n* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *')
2 changes: 1 addition & 1 deletion app/grants/models/grant.py
@@ -110,7 +110,7 @@ class Meta:
)

def __str__(self):
return f"{self.round_num}"
return f"pk:{self.pk}, round_num: {self.round_num}"

@property
def happening_now(self):
2 changes: 1 addition & 1 deletion app/grants/templates/grants/detail/_index.html
@@ -112,7 +112,7 @@ <h3 class="pt-2 pb-3">
{% trans "Latest Contributions" %}
</h3>
<h5 class="mb-5 text-muted" v-if="!transactions.grantTransactions?.length && !loadingTx">{% trans "No Activity for this Grant!" %}</h5>
<div class="px-0" v-if="transactions.grantTransactions?.length && !loadingTx">
<div class="px-0" v-if="transactions.grantTransactions?.length">
<div class="mb-5 pb-2 px-0 mx-sm-0">
<div class="col-12 px-0">
<div id="contributions">
30 changes: 18 additions & 12 deletions app/grants/views.py
@@ -1545,9 +1545,13 @@ def grant_details_contributors(request, grant_id):
def grant_details_contributions(request, grant_id):
page = int(request.GET.get('page', 1))
network = request.GET.get('network', 'mainnet')
limit = int(request.GET.get('limit', 10))
limit = int(request.GET.get('limit', 100))
max_page_size = 300
if limit > max_page_size:
limit = max_page_size

try:
grant = Grant.objects.prefetch_related('subscriptions').get(
grant = Grant.objects.get(
pk=grant_id
)
except Grant.DoesNotExist:
@@ -1558,19 +1562,24 @@ def grant_details_contributions(request, grant_id):

_contributions = Contribution.objects.filter(
subscription__grant=grant,
subscription__network=network,
subscription__is_postive_vote=True
).prefetch_related('subscription', 'subscription__contributor_profile')
contributions = list(_contributions.order_by('-created_on'))
)

contributions = _contributions.order_by('-created_on')
# print(contributions)
all_pages = Paginator(contributions, limit)
this_page = all_pages.page(page)
start_index = (page - 1) * limit
end_index = (page) * limit
this_page = contributions[start_index:end_index]
response = dict()

all_contributions = []
for contribution in this_page:
# print(contribution.subscription)
# print(contribution.subscription.tx_id)
subscription = contribution.subscription
if not subscription.is_postive_vote:
continue
if subscription.network != network:
continue

contribution_json = {
k: getattr(contribution, k) for k in
@@ -1590,10 +1599,7 @@ def grant_details_contributions(request, grant_id):
all_contributions.append(contribution_json)

response['contributions'] = json.loads(json.dumps(all_contributions, default=str))
response['has_next'] = all_pages.page(page).has_next()
response['count'] = all_pages.count
response['num_pages'] = all_pages.num_pages
response['next_page_number'] = all_pages.page(page).next_page_number() if all_pages.page(page).has_next() else None
response['next_page_number'] = page + 1

return JsonResponse(response)

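Note: the endpoint now slices the queryset itself instead of using Django's Paginator, and next_page_number is simply page + 1. A sketch of the slicing (hypothetical helper, not part of the commit; queryset slicing translates to LIMIT/OFFSET, so only one page of rows is fetched per request):

    def page_bounds(page, limit, max_page_size=300):
        # Mirrors the clamping and offset arithmetic in the view above.
        limit = min(limit, max_page_size)
        start_index = (page - 1) * limit
        end_index = page * limit
        return start_index, end_index

    start, end = page_bounds(page=2, limit=100)   # -> (100, 200)

Because the response no longer carries has_next, count or num_pages, a caller can treat a page that comes back with fewer than limit contributions as the last one.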
29 changes: 8 additions & 21 deletions app/kudos/management/commands/pull_tx_status.py
@@ -22,32 +22,19 @@
from django.core.management.base import BaseCommand
from django.utils import timezone

from dashboard.models import Earning, TransactionHistory
from dashboard.utils import get_tx_status_and_details
from economy.models import EncodeAnything
from dashboard.models import Earning
from dashboard.tasks import save_tx_status_and_details


class Command(BaseCommand):

help = 'pulls tx statuses and stores them in the DB to be queried later'

def handle(self, *args, **options):
earnings = Earning.objects.all().order_by('-pk')
earnings = Earning.objects.filter(history=None).order_by('-pk')
for earning in earnings:
if earning.history.count():
continue
txid = earning.txid
network = earning.network
created_on = earning.created_on
status, timestamp, tx = get_tx_status_and_details(txid, network, created_on)
print(earning.pk, status)
if status not in ['unknown', 'pending']:
tx = tx if tx else {}
TransactionHistory.objects.create(
earning=earning,
status=status,
payload=json.loads(json.dumps(dict(tx), cls=EncodeAnything)),
network=network,
txid=txid,
captured_at=timezone.now(),
)
# defer the op to celery
try:
save_tx_status_and_details.delay(earning.pk)
except:
print(f'failed to enqueue: {earning.pk}')
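Note: with this change the command only selects Earnings that have no history and hands each one to celery. Assuming the project's usual Django setup, it can be invoked from code as well as via manage.py (call_command is stock Django; pull_tx_status is the command defined above):

    from django.core.management import call_command

    call_command('pull_tx_status')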
4 changes: 2 additions & 2 deletions app/retail/templates/results.html
@@ -206,8 +206,8 @@ <h2>Gitcoin Grants: The Growth Engine that Could</h2>
</div>

<div class="row mt-2 pb-2 text-center">
<div class="col">
<iframe src="https://metabase.gitcoin.co/public/question/437a0ed4-bcb7-41eb-af63-e67c0965d200" frameborder="0" width="100%" height="600" allowtransparency></iframe>
<div class="col" id="grants_results">
.... loading ....
</div>
</div>

9 changes: 5 additions & 4 deletions scripts/backup.bash
@@ -28,15 +28,16 @@ export HOST=$(cat app/app/.env | grep "DATABASE_URL" | grep -v REPLICA | awk -F
IS_PROD=$(cat app/app/.env | grep ENV | grep prod | wc -l)
if [ "$IS_PROD" -eq "1" ]; then
# schema
$PG_DUMP gitcoin -U gitcoin -h $HOST --schema-only | s3cmd put - s3://gitcoinbackups/$YEAR/$MONTH/$DAY/create-$BACKUPSTR-$(hostname).sql
$PG_DUMP gitcoin -U gitcoin -h $HOST --schema-only | s3cmd put - s3://gitcoinbackups/$YEAR/$MONTH/$DAY/create-$BACKUPSTR-$(hostname).sql --multipart-chunk-size-mb=500

# lite backup
$PG_DUMP gitcoin -U gitcoin -h $HOST --data-only --exclude-table=marketing_emailevent --exclude-table=dashboard_objectview --exclude-table=marketing_stat --exclude-table=gas_gasprofile --exclude-table=marketing_githubevent --exclude-table=gas_gasguzzler --exclude-table=marketing_slackpresence | s3cmd put - s3://gitcoinbackups/$YEAR/$MONTH/$DAY/litedata-$BACKUPSTR-$(hostname).sql
$PG_DUMP gitcoin -U gitcoin -h $HOST --data-only --exclude-table=marketing_emailevent --exclude-table=marketing_emailevent2 --exclude-table=dashboard_objectview --exclude-table=marketing_stat --exclude-table=gas_gasprofile --exclude-table=marketing_githubevent --exclude-table=gas_gasguzzler --exclude-table=marketing_slackpresence | s3cmd put - s3://gitcoinbackups/$YEAR/$MONTH/$DAY/litedata-$BACKUPSTR-$(hostname).sql --multipart-chunk-size-mb=500

# minimal backup
$PG_DUMP gitcoin -U gitcoin -h $HOST --data-only --exclude-table=marketing_emailevent --exclude-table=dashboard_objectview --exclude-table=marketing_stat --exclude-table=gas_gasprofile --exclude-table=marketing_githubevent --exclude-table=gas_gasguzzler --exclude-table=marketing_slackpresence --exclude-table=dashboard_activity --exclude-table=marketing_leaderboardrank --exclude-table=dashboard_activity --exclude-table=perftools_jsonstore --exclude-table=pg_toast --exclude-table=dashboard_searchhistory --exclude-table=townsquare_matchranking --exclude-table=dashboard_useraction --exclude-table=grants_cartactivity --exclude-table=dashboard_profilestathistory --exclude-table=marketing_githubevent --exclude-table=django_session --exclude-table=search_searchresult | s3cmd put - s3://gitcoinbackups/$YEAR/$MONTH/$DAY/minimal-$BACKUPSTR-$(hostname).sql
$PG_DUMP gitcoin -U gitcoin -h $HOST --data-only --exclude-table=grants_grantstat --exclude-table=marketing_emailevent --exclude-table=marketing_emailevent2 --exclude-table=inbox_notification --exclude-table=dashboard_objectview --exclude-table=marketing_stat --exclude-table=gas_gasprofile --exclude-table=marketing_githubevent --exclude-table=gas_gasguzzler --exclude-table=marketing_slackpresence --exclude-table=dashboard_activity --exclude-table=marketing_leaderboardrank --exclude-table=dashboard_activity --exclude-table=perftools_jsonstore --exclude-table=pg_toast --exclude-table=dashboard_searchhistory --exclude-table=townsquare_matchranking --exclude-table=dashboard_useraction --exclude-table=grants_cartactivity --exclude-table=dashboard_profilestathistory --exclude-table=marketing_githubevent --exclude-table=django_session --exclude-table=search_searchresult | s3cmd put - s3://gitcoinbackups/$YEAR/$MONTH/$DAY/minimal-$BACKUPSTR-$(hostname).sql --multipart-chunk-size-mb=500

# full backup
$PG_DUMP gitcoin -U gitcoin -h $HOST | s3cmd put - s3://gitcoinbackups/$YEAR/$MONTH/$DAY/full-$BACKUPSTR-$(hostname).sql
$PG_DUMP gitcoin -U gitcoin -h $HOST | s3cmd put - s3://gitcoinbackups/$YEAR/$MONTH/$DAY/full-$BACKUPSTR-$(hostname).sql --multipart-chunk-size-mb=500


else
6 changes: 5 additions & 1 deletion scripts/deploy.bash
@@ -96,7 +96,11 @@ if ! [ "$JOBS_NODE" ]; then
sleep 1.5
done

if [ $(pgrep -fl "gunicorn: worke" | wc -l) -eq "0" ]; then
# restart gunicorn
if [ $(pgrep -fl "supervisor" | wc -l) -eq "0" ] && [ $(pgrep -fl "gunicorn: worke" | wc -l) -eq "0" ]; then
echo "- RESTART gitcoin-gunicorn"
sudo supervisorctl restart gitcoin_gunicorn
elif [ $(pgrep -fl "gunicorn: worke" | wc -l) -eq "0" ]; then
echo "- RESTART gunicorn"
sudo systemctl restart gunicorn
fi
